Compare commits

62 Commits

Author SHA1 Message Date
Nikita Gubarkov
fd009fe88e Enumerate and filter physical devices, create a logical device. Corrections. 2023-04-05 20:32:21 +03:00
Nikita Gubarkov
738f762d3e Enumerate and filter physical devices, create a logical device. 2023-03-31 19:55:13 +03:00
Nikita Gubarkov
ef9172f50e Remove bundled Vulkan headers, use stubs when Vulkan is disabled. Corrections. 2023-03-31 12:17:36 +03:00
Nikita Gubarkov
ed8a7ad702 Remove bundled Vulkan headers, use stubs when Vulkan is disabled. 2023-03-30 21:53:47 +03:00
Alexey Ushakov
6238545ead Vulkan sdk support in configure. Corrections 2023-03-17 21:30:47 +01:00
Alexey Ushakov
9bda49c60c Vulkan sdk support in configure 2023-03-16 21:13:22 +01:00
Nikita Gubarkov
a8cdbbfb8d Added debug messenger. 2023-03-16 22:10:10 +02:00
Nikita Gubarkov
2d7c8037fa Decoupled Vulkan from Wayland.
Extracted protocol-independent logic into "share".
2023-03-16 20:57:18 +02:00
Nikita Gubarkov
b51d9559b0 Forgot to stage vk_video headers. 2023-03-16 18:10:04 +02:00
Nikita Gubarkov
f410ae50af Imported Vulkan headers, started using Vulkan-Hpp. 2023-03-16 18:05:38 +02:00
Alexey Ushakov
7ebdeb734b Added detection of Vulkan support. Corrections. 2023-03-16 11:21:15 +01:00
Alexey Ushakov
f7821f8d45 Added detection of Vulkan support 2023-03-15 19:29:22 +01:00
Maxim Kartashev
28a609691a Implemented GraphicsEnvironment and Device, HiDPI (scaling) support 2022-12-13 10:06:03 +03:00
Maxim Kartashev
2f47290c19 Dialog and window menu support 2022-11-28 10:57:38 +03:00
Maxim Kartashev
ba1c4a464d dlopen version 0 of xkbcommon if non-versioned file is missing 2022-11-23 13:19:15 +03:00
Dmitry Batrak
466f1dd0fd generate correct mouse events after click in window resize area 2022-11-23 11:17:30 +03:00
Dmitry Batrak
fd37168007 set cursor on pointer enter event, as per Wayland API requirement
in the initial implementation cursor was only updated on pointer move events
2022-11-18 19:51:17 +03:00
Dmitry Batrak
607dcb78a8 support setting mouse cursors 2022-11-18 14:22:21 +03:00
Maxim Kartashev
f9f5359629 Prevent deadlock when scrolling
SurfaceData need to be able to be locked twice during scrolling,
but the associated mutex wasn't recursive, which led to a deadlock.
2022-11-16 17:22:24 +03:00
Maxim Kartashev
6b19a747f7 Basic support for VolatileImage
The image is actually a non-volatile software implementation.
2022-11-15 11:52:49 +03:00
Maxim Kartashev
7d7d9f9bf5 Prevent race condition when destroying buffer manager
Also implemented AWT_LOCK() family of macros
2022-11-15 11:52:23 +03:00
Dmitry Batrak
66c28a3606 prevent crashes on concurrent access to AWT API 2022-11-14 19:12:31 +03:00
Maxim Kartashev
e2321b5594 Implemented getColorModel() and createAcceleratedImage()
This is enough to make J2Ddemo and StylePad work
2022-11-11 12:04:24 +03:00
Dmitry Batrak
5813b10e65 maximize/un-maximize improvements
* remove 'roundtrip' calls - they don't seem to be needed after recent changes to paint logic
* remove unneeded lock in WLFramePeer.setState - corresponding code doesn't query or modify any state
* always repaint client decorations on frame state change - it might not be accompanied by size change
* remember the size of frame before maximization, use it on de-maximization, if compositor doesn't propose a size itself
2022-11-09 16:18:28 +03:00
Maxim Kartashev
d121a93cb1 JBR-4918 More bugfixes in Wayland buffers management
Event-driven painting of client decorations.
Smooth window resize.
Transactional commits at AWT and Swing level
based on frame numbers.
2022-11-08 08:40:18 +03:00
Dmitry Batrak
f7638abee2 initialize memory allocated for WLFrame
just in case, to prevent potential usage of uninitialized fields in future
2022-11-02 12:41:08 +03:00
Dmitry Batrak
55b1310c24 support setting state to a window before making it visible, and right afterwards 2022-10-31 12:23:16 +03:00
Maxim Kartashev
bfe03f4bd1 Revert "JBR-4918 More bugfixes in Wayland buffers management"
This reverts commit 15a09a1564.
2022-10-28 13:20:27 +03:00
Maxim Kartashev
15a09a1564 JBR-4918 More bugfixes in Wayland buffers management
Event-driven painting of client decorations.
Smooth window resize.
Transactional commits at AWT and Swing level
based on frame numbers.
2022-10-28 11:28:57 +03:00
Dmitry Batrak
858380c36d fix assertion in WLKeyboardFocusManagerPeer 2022-10-21 18:34:02 +03:00
Dmitry Batrak
a81b44d79d client-side decorations, and some fixes for minimize/maximize window functionality 2022-10-21 16:46:36 +03:00
Dmitry Batrak
7f9aee3c7f make default component focused on frame activation 2022-10-21 12:05:01 +03:00
Maxim Kartashev
0478a24483 JBR-4918 Additional bugfixes in Wayland buffers management 2022-10-21 09:48:51 +03:00
Maxim Kartashev
c113772448 JBR-4865 Support xdg-shell functions
Implemented maximize/fullscreen together with the reverse functions.
2022-10-19 11:24:08 +03:00
Maxim Kartashev
757194800f JBR-4918 Implement support for window size change 2022-10-18 11:07:01 +03:00
Maxim Kartashev
b9c4ac35ec JBR-4865 Support xdg-shell functions 2022-10-18 11:06:57 +03:00
Dmitry Batrak
adf8d95f7b simplify Wayland events dispatching, fix known issues 2022-10-13 10:23:03 +03:00
Maxim Kartashev
b2986aef46 JBR-4621 Implemented key repeat 2022-10-12 14:23:09 +03:00
Maxim Kartashev
cea81933d9 JBR-4621 Input events support for Wayland
This includes basic mouse and keyboard support.
2022-10-12 14:23:09 +03:00
Maxim Kartashev
6779e2c59b Let WLToolkit work with DISPLAY unset 2022-10-12 14:23:06 +03:00
Alexey Ushakov
4b7c5f62a9 Improved sun.awt.wl.WLGraphicsEnvironment to support createCraphics() 2022-10-12 14:22:34 +03:00
Maxim Kartashev
df204bb882 Added libwakefield source code to the tree
It is not integrated into the build infrastructure both for simplicity
and to avoid otherwise unnecessary dependencies on weston, pixman, etc.

Also fixed copyrights in the recently added files, including the
auto-generated ones.
2022-10-12 14:22:34 +03:00
Maxim Kartashev
0d7fdcf415 Made it possible for Wayland tests to run in parallel
Also fixed a potential crash in getLocationOnScreen().
2022-10-12 14:22:34 +03:00
Maxim Kartashev
73c8c50262 Wayland test harness and sample test 2022-10-12 14:22:34 +03:00
Maxim Kartashev
16cacd0b55 AWT Robot to support Wayland natively
Requires the presence of the 'wakefield' protocol extension on the
server side; will throw UOE on use otherwise. Can be completely
disabled by undefining WAKEFIELD_ROBOT during compilation.

Provides the ability to re-position the surface to the given absolute
coordinates, query the surface's position, obtain RGB of a pixel at the
given absolute coordinates and take a screenshot of an area.
2022-10-12 14:22:34 +03:00
Nikita Gubarkov
19496fcef9 Suppress unused-result warning for libfontmanager 2022-10-12 14:22:34 +03:00
nikita.gubarkov
9cb4769361 Text rendering support
Extracted X11-related code from libfontmanager into libfontmanager_xawt
2022-10-12 14:22:33 +03:00
Maxim Kartashev
dc36d0afaf Reduced xdg_wm_base protocol version to 1 in order to run under Weston
This was done purely for convenience. The version can be bumped back up
at any time, but the change will require a more recent version
of Weston for testing.
2022-10-12 14:22:33 +03:00
Alexey Ushakov
20ca5a41f4 Added JFrame support 2022-10-12 14:22:33 +03:00
Alexey Ushakov
e864ea8469 Fixed child hw component position 2022-10-12 14:22:33 +03:00
Alexey Ushakov
b3e31866ec Implemented heavyweight button rendering 2022-10-12 14:22:33 +03:00
Alexey Ushakov
4cdce4b44a Moved native window management to WLComponentPeer 2022-10-12 14:22:33 +03:00
Alexey Ushakov
2187957e7e Added WLRepaintArea 2022-10-12 14:22:33 +03:00
Alexey Ushakov
14aa544c86 Refactored peers 2022-10-12 14:22:32 +03:00
Alexey Ushakov
de5214531a Added stubs for WLTK button peer 2022-10-12 14:22:32 +03:00
Alexey Ushakov
ea6f74d64f Added 2d surface support 2022-10-12 14:22:32 +03:00
Alexey Ushakov
56e174709b Added support for background color. Refactoring 2022-10-12 14:22:32 +03:00
Alexey Ushakov
c5103ff4a8 Make simple awt window visible 2022-10-12 14:22:32 +03:00
Dmitry Batrak
49c103709e window showing and event loop prototype 2022-10-12 14:22:32 +03:00
Dmitry Batrak
4b21d041d8 more stubbing for WLToolkit, add WLFramePeer 2022-10-12 14:22:31 +03:00
Dmitry Batrak
c1ee18adfb more stubbing for WLToolkit 2022-10-12 14:22:31 +03:00
Alexey Ushakov
693e16b0a1 Created stub version of WLToolkit
A wayland base toolkit with native part linked to wayland-client library
2022-10-12 14:22:28 +03:00
20190 changed files with 692900 additions and 1391484 deletions

View File

@@ -1,68 +0,0 @@
#
# Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
name: 'Build JTReg'
description: 'Build JTReg'
runs:
using: composite
steps:
- name: 'Get JTReg version configuration'
id: version
uses: ./.github/actions/config
with:
var: JTREG_VERSION
- name: 'Check cache for already built JTReg'
id: get-cached
uses: actions/cache@v4
with:
path: jtreg/installed
key: jtreg-${{ steps.version.outputs.value }}
- name: 'Checkout the JTReg source'
uses: actions/checkout@v4
with:
repository: openjdk/jtreg
ref: jtreg-${{ steps.version.outputs.value }}
path: jtreg/src
if: (steps.get-cached.outputs.cache-hit != 'true')
- name: 'Build JTReg'
run: |
# Build JTReg and move files to the proper locations
bash make/build.sh --jdk "$JAVA_HOME_17_X64"
mkdir ../installed
mv build/images/jtreg/* ../installed
working-directory: jtreg/src
shell: bash
if: (steps.get-cached.outputs.cache-hit != 'true')
- name: 'Upload JTReg artifact'
uses: actions/upload-artifact@v4
with:
name: bundles-jtreg-${{ steps.version.outputs.value }}
path: jtreg/installed
retention-days: 5

View File

@@ -42,5 +42,5 @@ runs:
run: |
# Extract value from configuration file
value="$(grep -h ${{ inputs.var }}= make/conf/github-actions.conf | cut -d '=' -f 2-)"
echo "value=$value" >> $GITHUB_OUTPUT
echo "::set-output name=value::$value"
shell: bash
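
Both output syntaxes appear in this hunk: writing a `name=value` pair into the file referenced by `$GITHUB_OUTPUT`, and the older `::set-output` workflow command that GitHub has since deprecated. A minimal sketch of how the two forms are used, reusing the `version` step id seen elsewhere in these actions:

```
# Current form: append "name=value" to the file GitHub exposes as $GITHUB_OUTPUT
echo "value=$value" >> "$GITHUB_OUTPUT"

# Deprecated form: emit a ::set-output workflow command on stdout
echo "::set-output name=value::$value"

# Either way, later steps read the value as:
#   ${{ steps.version.outputs.value }}
```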

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2025, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -42,7 +42,7 @@ runs:
- name: 'Build'
id: build
run: >
make -k LOG=info ${{ inputs.make-target }}
make LOG=info ${{ inputs.make-target }}
|| bash ./.github/scripts/gen-build-failure-report.sh "$GITHUB_STEP_SUMMARY"
shell: bash
@@ -61,12 +61,12 @@ runs:
$build_dir/make-support/failure-summary.log \
$build_dir/make-support/failure-logs/* \
failure-logs/ 2> /dev/null || true
echo 'failure=true' >> $GITHUB_OUTPUT
echo '::set-output name=failure::true'
fi
shell: bash
- name: 'Upload build logs'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: failure-logs-${{ inputs.platform }}${{ inputs.debug-suffix }}
path: failure-logs
@@ -74,7 +74,7 @@ runs:
# This is the best way I found to abort the job with an error message
- name: 'Notify about build failures'
uses: actions/github-script@v7
uses: actions/github-script@v6
with:
script: core.setFailed('Build failed. See summary for details.')
if: steps.check.outputs.failure == 'true'

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -42,7 +42,7 @@ runs:
run: |
# Convert platform name to upper case
platform_prefix="$(echo ${{ inputs.platform }} | tr [a-z-] [A-Z_])"
echo "value=$platform_prefix" >> $GITHUB_OUTPUT
echo "::set-output name=value::$platform_prefix"
shell: bash
- name: 'Get URL configuration'
@@ -65,7 +65,7 @@ runs:
- name: 'Check cache for BootJDK'
id: get-cached-bootjdk
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: bootjdk/jdk
key: boot-jdk-${{ inputs.platform }}-${{ steps.sha256.outputs.value }}
@@ -104,6 +104,6 @@ runs:
- name: 'Export path to where BootJDK is installed'
id: path-name
run: |
# Export the absolute path
echo "path=`pwd`/bootjdk/jdk" >> $GITHUB_OUTPUT
# Export the path
echo '::set-output name=path::bootjdk/jdk'
shell: bash

View File

@@ -48,14 +48,14 @@ runs:
steps:
- name: 'Download bundles artifact'
id: download-bundles
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: bundles-${{ inputs.platform }}${{ inputs.debug-suffix }}
path: bundles
continue-on-error: true
- name: 'Download bundles artifact (retry)'
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: bundles-${{ inputs.platform }}${{ inputs.debug-suffix }}
path: bundles
@@ -103,7 +103,7 @@ runs:
tests_dir="$(cygpath $tests_dir)"
fi
echo "jdk=$jdk_dir" >> $GITHUB_OUTPUT
echo "symbols=$symbols_dir" >> $GITHUB_OUTPUT
echo "tests=$tests_dir" >> $GITHUB_OUTPUT
echo "::set-output name=jdk::$jdk_dir"
echo "::set-output name=symbols::$symbols_dir"
echo "::set-output name=tests::$tests_dir"
shell: bash

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -40,15 +40,15 @@ runs:
var: GTEST_VERSION
- name: 'Checkout GTest source'
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: google/googletest
ref: 'v${{ steps.version.outputs.value }}'
ref: 'release-${{ steps.version.outputs.value }}'
path: gtest
- name: 'Export path to where GTest is installed'
id: path-name
run: |
# Export the path
echo 'path=gtest' >> $GITHUB_OUTPUT
echo '::set-output name=path::gtest'
shell: bash

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -24,7 +24,7 @@
#
name: 'Get JTReg'
description: 'Get JTReg'
description: 'Download JTReg from cache or source location'
outputs:
path:
description: 'Path to the installed JTReg'
@@ -39,16 +39,34 @@ runs:
with:
var: JTREG_VERSION
- name: 'Download JTReg artifact'
id: download-jtreg
uses: actions/download-artifact@v4
- name: 'Check cache for JTReg'
id: get-cached-jtreg
uses: actions/cache@v3
with:
name: bundles-jtreg-${{ steps.version.outputs.value }}
path: jtreg/installed
key: jtreg-${{ steps.version.outputs.value }}
- name: 'Checkout the JTReg source'
uses: actions/checkout@v3
with:
repository: openjdk/jtreg
ref: jtreg-${{ steps.version.outputs.value }}
path: jtreg/src
if: steps.get-cached-jtreg.outputs.cache-hit != 'true'
- name: 'Build JTReg'
run: |
# Build JTReg and move files to the proper locations
bash make/build.sh --jdk "$JAVA_HOME_11_X64"
mkdir ../installed
mv build/images/jtreg/* ../installed
working-directory: jtreg/src
shell: bash
if: steps.get-cached-jtreg.outputs.cache-hit != 'true'
- name: 'Export path to where JTReg is installed'
id: path-name
run: |
# Export the path
echo 'path=jtreg/installed' >> $GITHUB_OUTPUT
echo '::set-output name=path::jtreg/installed'
shell: bash

View File

@@ -30,15 +30,15 @@ runs:
using: composite
steps:
- name: 'Install MSYS2'
uses: msys2/setup-msys2@v2.22.0
uses: msys2/setup-msys2@v2
with:
install: 'autoconf tar unzip zip make'
path-type: minimal
location: ${{ runner.tool_cache }}/msys2
location: msys2
# We can't run bash until this is completed, so stick with pwsh
- name: 'Set MSYS2 path'
run: |
# Prepend msys2/msys64/usr/bin to the PATH
echo "$env:RUNNER_TOOL_CACHE/msys2/msys64/usr/bin" >> $env:GITHUB_PATH
echo "$env:GITHUB_WORKSPACE/msys2/msys64/usr/bin" >> $env:GITHUB_PATH
shell: pwsh

View File

@@ -62,16 +62,16 @@ runs:
fi
if [[ "$jdk_bundle_zip$jdk_bundle_tar_gz$symbols_bundle$tests_bundle" != "" ]]; then
echo 'bundles-found=true' >> $GITHUB_OUTPUT
echo '::set-output name=bundles-found::true'
else
echo 'bundles-found=false' >> $GITHUB_OUTPUT
echo '::set-output name=bundles-found::false'
fi
shell: bash
- name: 'Upload bundles artifact'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: bundles-${{ inputs.platform }}${{ inputs.debug-suffix }}
path: bundles
retention-days: 5
retention-days: 1
if: steps.bundles.outputs.bundles-found == 'true'

View File

@@ -24,19 +24,12 @@
# questions.
#
# Import common utils
. .github/scripts/report-utils.sh
GITHUB_STEP_SUMMARY="$1"
BUILD_DIR="$(ls -d build/*)"
# Send signal to the do-build action that we failed
touch "$BUILD_DIR/build-failure"
# Collect hs_errs for build-time crashes, e.g. javac, jmod, jlink, CDS.
# These usually land in make/
hs_err_files=$(ls make/hs_err*.log 2> /dev/null || true)
(
echo '### :boom: Build failure summary'
echo ''
@@ -53,20 +46,6 @@ hs_err_files=$(ls make/hs_err*.log 2> /dev/null || true)
echo '</details>'
echo ''
for hs_err in $hs_err_files; do
echo "<details><summary><b>View HotSpot error log: "$hs_err"</b></summary>"
echo ''
echo '```'
echo "$hs_err:"
echo ''
cat "$hs_err"
echo '```'
echo '</details>'
echo ''
done
echo ''
echo ':arrow_right: To see the entire test log, click the job in the list to the left. To download logs, see the `failure-logs` [artifact above](#artifacts).'
) >> $GITHUB_STEP_SUMMARY
truncate_summary

View File

@@ -1,6 +1,6 @@
#!/bin/bash
#
# Copyright (c) 2022, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -24,9 +24,6 @@
# questions.
#
# Import common utils
. .github/scripts/report-utils.sh
GITHUB_STEP_SUMMARY="$1"
test_suite_name=$(cat build/run-test-prebuilt/test-support/test-last-ids.txt)
@@ -47,8 +44,8 @@ for test in $failures $errors; do
base_path="$(echo "$test" | tr '#' '_')"
report_file="$report_dir/$base_path.jtr"
hs_err_files=$(ls $report_dir/$base_path/hs_err*.log 2> /dev/null || true)
replay_files=$(ls $report_dir/$base_path/replay*.log 2> /dev/null || true)
echo "#### <a id="$anchor">$test"
echo '<details><summary>View test results</summary>'
echo ''
echo '```'
@@ -76,22 +73,20 @@ for test in $failures $errors; do
echo ''
fi
if [[ "$replay_files" != "" ]]; then
echo '<details><summary>View HotSpot replay file</summary>'
echo ''
for replay in $replay_files; do
echo '```'
echo "$replay:"
echo ''
cat "$replay"
echo '```'
done
echo '</details>'
echo ''
fi
done >> $GITHUB_STEP_SUMMARY
echo ':arrow_right: To see the entire test log, click the job in the list to the left.' >> $GITHUB_STEP_SUMMARY
# With many failures, the summary can easily exceed 1024 kB, the limit set by Github
# Trim it down if so.
summary_size=$(wc -c < $GITHUB_STEP_SUMMARY)
if [[ $summary_size -gt 1000000 ]]; then
# Trim to below 1024 kB, and cut off after the last detail group
head -c 1000000 $GITHUB_STEP_SUMMARY | tac | sed -n -e '/<\/details>/,$ p' | tac > $GITHUB_STEP_SUMMARY.tmp
mv $GITHUB_STEP_SUMMARY.tmp $GITHUB_STEP_SUMMARY
(
echo ''
echo ':x: **WARNING: Summary is too large and has been truncated.**'
echo ''
) >> $GITHUB_STEP_SUMMARY
fi
truncate_summary
echo ':arrow_right: To see the entire test log, click the job in the list to the left.' >> $GITHUB_STEP_SUMMARY

View File

@@ -25,7 +25,6 @@
#
GITHUB_STEP_SUMMARY="$1"
GITHUB_OUTPUT="$2"
test_suite_name=$(cat build/run-test-prebuilt/test-support/test-last-ids.txt)
results_dir=build/run-test-prebuilt/test-results/$test_suite_name/text
@@ -42,13 +41,12 @@ error_count=$(echo $errors | wc -w || true)
if [[ "$failures" = "" && "$errors" = "" ]]; then
# We know something went wrong, but not what
echo 'failure=true' >> $GITHUB_OUTPUT
echo 'error-message=Unspecified test suite failure. Please see log for job for details.' >> $GITHUB_OUTPUT
echo '::set-output name=error-message::Unspecified test suite failure. Please see log for job for details.'
exit 0
fi
echo 'failure=true' >> $GITHUB_OUTPUT
echo "error-message=Test run reported $failure_count test failure(s) and $error_count error(s). See summary for details." >> $GITHUB_OUTPUT
echo '::set-output name=failure::true'
echo "::set-output name=error-message::Test run reported $failure_count test failure(s) and $error_count error(s). See summary for details."
echo '### :boom: Test failures summary' >> $GITHUB_STEP_SUMMARY

View File

@@ -1,41 +0,0 @@
#!/bin/bash
#
# Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
function truncate_summary() {
# With large hs_errs, the summary can easily exceed 1024 kB, the limit set by Github
# Trim it down if so.
summary_size=$(wc -c < $GITHUB_STEP_SUMMARY)
if [[ $summary_size -gt 1000000 ]]; then
# Trim to below 1024 kB, and cut off after the last detail group
head -c 1000000 $GITHUB_STEP_SUMMARY | tac | sed -n -e '/<\/details>/,$ p' | tac > $GITHUB_STEP_SUMMARY.tmp
mv $GITHUB_STEP_SUMMARY.tmp $GITHUB_STEP_SUMMARY
(
echo ''
echo ':x: **WARNING: Summary is too large and has been truncated.**'
echo ''
) >> $GITHUB_STEP_SUMMARY
fi
}

View File

@@ -31,15 +31,15 @@ on:
gcc-major-version:
required: true
type: string
apt-gcc-version:
required: true
type: string
apt-gcc-cross-version:
required: true
type: string
extra-conf-options:
required: false
type: string
configure-arguments:
required: false
type: string
make-arguments:
required: false
type: string
jobs:
build-cross-compile:
@@ -61,36 +61,32 @@ jobs:
debian-arch: arm64
debian-repository: https://httpredir.debian.org/debian/
debian-version: bullseye
tolerate-sysroot-errors: false
- target-cpu: arm
gnu-arch: arm
debian-arch: armhf
debian-repository: https://httpredir.debian.org/debian/
debian-version: bullseye
tolerate-sysroot-errors: false
gnu-abi: eabihf
- target-cpu: s390x
gnu-arch: s390x
debian-arch: s390x
debian-repository: https://httpredir.debian.org/debian/
debian-version: bullseye
tolerate-sysroot-errors: false
- target-cpu: ppc64le
gnu-arch: powerpc64le
debian-arch: ppc64el
debian-repository: https://httpredir.debian.org/debian/
debian-version: bullseye
tolerate-sysroot-errors: false
- target-cpu: riscv64
gnu-arch: riscv64
debian-arch: riscv64
debian-repository: https://httpredir.debian.org/debian/
debian-repository: https://deb.debian.org/debian-ports
debian-keyring: /usr/share/keyrings/debian-ports-archive-keyring.gpg
debian-version: sid
tolerate-sysroot-errors: true
steps:
- name: 'Checkout the JDK source'
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: 'Get the BootJDK'
id: bootjdk
@@ -98,9 +94,12 @@ jobs:
with:
platform: linux-x64
- name: 'Get GTest'
id: gtest
uses: ./.github/actions/get-gtest
# Use linux-x64 JDK bundle as build JDK
- name: 'Get build JDK'
id: buildjdk
uses: ./.github/actions/get-bundles
with:
platform: linux-x64
# Upgrading apt to solve libc6 installation bugs, see JDK-8260460.
- name: 'Install toolchain and dependencies'
@@ -109,16 +108,17 @@ jobs:
sudo apt-get update
sudo apt-get install --only-upgrade apt
sudo apt-get install \
gcc-${{ inputs.gcc-major-version }} \
g++-${{ inputs.gcc-major-version }} \
gcc-${{ inputs.gcc-major-version }}-${{ matrix.gnu-arch }}-linux-gnu${{ matrix.gnu-abi}} \
g++-${{ inputs.gcc-major-version }}-${{ matrix.gnu-arch }}-linux-gnu${{ matrix.gnu-abi}} \
libxrandr-dev libxtst-dev libcups2-dev libasound2-dev
gcc-${{ inputs.gcc-major-version }}=${{ inputs.apt-gcc-version }} \
g++-${{ inputs.gcc-major-version }}=${{ inputs.apt-gcc-version }} \
gcc-${{ inputs.gcc-major-version }}-${{ matrix.gnu-arch }}-linux-gnu${{ matrix.gnu-abi}}=${{ inputs.apt-gcc-cross-version }} \
g++-${{ inputs.gcc-major-version }}-${{ matrix.gnu-arch }}-linux-gnu${{ matrix.gnu-abi}}=${{ inputs.apt-gcc-cross-version }} \
libxrandr-dev libxtst-dev libcups2-dev libasound2-dev \
debian-ports-archive-keyring
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-${{ inputs.gcc-major-version }} 100 --slave /usr/bin/g++ g++ /usr/bin/g++-${{ inputs.gcc-major-version }}
- name: 'Check cache for sysroot'
id: get-cached-sysroot
uses: actions/cache@v4
uses: actions/cache@v3
with:
path: sysroot
key: sysroot-${{ matrix.debian-arch }}-${{ hashFiles('./.github/workflows/build-cross-compile.yml') }}
@@ -128,19 +128,16 @@ jobs:
if: steps.get-cached-sysroot.outputs.cache-hit != 'true'
- name: 'Create sysroot'
id: create-sysroot
run: >
sudo debootstrap
--no-merged-usr
--arch=${{ matrix.debian-arch }}
--verbose
--include=fakeroot,symlinks,build-essential,libx11-dev,libxext-dev,libxrender-dev,libxrandr-dev,libxtst-dev,libxt-dev,libcups2-dev,libfontconfig1-dev,libasound2-dev,libfreetype-dev,libpng-dev
--include=fakeroot,symlinks,build-essential,libx11-dev,libxext-dev,libxrender-dev,libxrandr-dev,libxtst-dev,libxt-dev,libcups2-dev,libfontconfig1-dev,libasound2-dev,libfreetype6-dev,libpng-dev
--resolve-deps
--variant=minbase
$(test -n "${{ matrix.debian-keyring }}" && echo "--keyring=${{ matrix.debian-keyring }}")
${{ matrix.debian-version }}
sysroot
${{ matrix.debian-repository }}
continue-on-error: ${{ matrix.tolerate-sysroot-errors }}
if: steps.get-cached-sysroot.outputs.cache-hit != 'true'
- name: 'Prepare sysroot'
@@ -150,17 +147,8 @@ jobs:
sudo chown ${USER} -R sysroot
rm -rf sysroot/{dev,proc,run,sys,var}
rm -rf sysroot/usr/{sbin,bin,share}
rm -rf sysroot/usr/lib/{apt,gcc,udev,systemd}
rm -rf sysroot/usr/libexec/gcc
# /{bin,sbin,lib}/ are not symbolic links to /usr/{bin,sbin,lib}/ when debootstrap with --no-merged-usr
rm -rf sysroot/{sbin,bin}
rm -rf sysroot/lib/{udev,systemd}
if: steps.create-sysroot.outcome == 'success' && steps.get-cached-sysroot.outputs.cache-hit != 'true'
- name: 'Remove broken sysroot'
run: |
sudo rm -rf sysroot/
if: steps.create-sysroot.outcome != 'success' && steps.get-cached-sysroot.outputs.cache-hit != 'true'
rm -rf sysroot/usr/lib/{apt,udev,systemd}
if: steps.get-cached-sysroot.outputs.cache-hit != 'true'
- name: 'Configure'
run: >
@@ -168,25 +156,23 @@ jobs:
--with-conf-name=linux-${{ matrix.target-cpu }}
--with-version-opt=${GITHUB_ACTOR}-${GITHUB_SHA}
--with-boot-jdk=${{ steps.bootjdk.outputs.path }}
--with-gtest=${{ steps.gtest.outputs.path }}
--with-zlib=system
--enable-debug
--disable-precompiled-headers
--openjdk-target=${{ matrix.gnu-arch }}-linux-gnu${{ matrix.gnu-abi}}
--with-sysroot=sysroot
--with-build-jdk=${{ steps.buildjdk.outputs.jdk-path }}
--with-jmod-compress=zip-1
CC=${{ matrix.gnu-arch }}-linux-gnu${{ matrix.gnu-abi}}-gcc-${{ inputs.gcc-major-version }}
CXX=${{ matrix.gnu-arch }}-linux-gnu${{ matrix.gnu-abi}}-g++-${{ inputs.gcc-major-version }}
${{ inputs.extra-conf-options }} ${{ inputs.configure-arguments }} || (
${{ inputs.extra-conf-options }} || (
echo "Dumping config.log:" &&
cat config.log &&
exit 1)
if: steps.create-sysroot.outcome == 'success' || steps.get-cached-sysroot.outputs.cache-hit == 'true'
- name: 'Build'
id: build
uses: ./.github/actions/do-build
with:
make-target: 'hotspot ${{ inputs.make-arguments }}'
make-target: 'hotspot'
platform: linux-${{ matrix.target-cpu }}
if: steps.create-sysroot.outcome == 'success' || steps.get-cached-sysroot.outputs.cache-hit == 'true'

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -49,18 +49,15 @@ on:
required: false
type: string
default: ''
apt-gcc-version:
required: true
type: string
apt-architecture:
required: false
type: string
apt-extra-packages:
required: false
type: string
configure-arguments:
required: false
type: string
make-arguments:
required: false
type: string
jobs:
build-linux:
@@ -78,7 +75,7 @@ jobs:
steps:
- name: 'Checkout the JDK source'
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: 'Get the BootJDK'
id: bootjdk
@@ -99,7 +96,7 @@ jobs:
run: |
# Set a proper suffix for packages if using a different architecture
if [[ '${{ inputs.apt-architecture }}' != '' ]]; then
echo 'suffix=:${{ inputs.apt-architecture }}' >> $GITHUB_OUTPUT
echo '::set-output name=suffix:::${{ inputs.apt-architecture }}'
fi
# Upgrading apt to solve libc6 installation bugs, see JDK-8260460.
@@ -111,7 +108,7 @@ jobs:
fi
sudo apt-get update
sudo apt-get install --only-upgrade apt
sudo apt-get install gcc-${{ inputs.gcc-major-version }}${{ inputs.gcc-package-suffix }} g++-${{ inputs.gcc-major-version }}${{ inputs.gcc-package-suffix }} libxrandr-dev${{ steps.arch.outputs.suffix }} libxtst-dev${{ steps.arch.outputs.suffix }} libcups2-dev${{ steps.arch.outputs.suffix }} libasound2-dev${{ steps.arch.outputs.suffix }} ${{ inputs.apt-extra-packages }}
sudo apt-get install gcc-${{ inputs.gcc-major-version }}${{ inputs.gcc-package-suffix }}=${{ inputs.apt-gcc-version }} g++-${{ inputs.gcc-major-version }}${{ inputs.gcc-package-suffix }}=${{ inputs.apt-gcc-version }} libxrandr-dev${{ steps.arch.outputs.suffix }} libxtst-dev${{ steps.arch.outputs.suffix }} libcups2-dev${{ steps.arch.outputs.suffix }} libasound2-dev${{ steps.arch.outputs.suffix }} ${{ inputs.apt-extra-packages }}
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-${{ inputs.gcc-major-version }} 100 --slave /usr/bin/g++ g++ /usr/bin/g++-${{ inputs.gcc-major-version }}
- name: 'Configure'
@@ -123,9 +120,10 @@ jobs:
--with-boot-jdk=${{ steps.bootjdk.outputs.path }}
--with-jtreg=${{ steps.jtreg.outputs.path }}
--with-gtest=${{ steps.gtest.outputs.path }}
--enable-jtreg-failure-handler
--with-zlib=system
--with-jmod-compress=zip-1
${{ inputs.extra-conf-options }} ${{ inputs.configure-arguments }} || (
${{ inputs.extra-conf-options }} || (
echo "Dumping config.log:" &&
cat config.log &&
exit 1)
@@ -134,7 +132,7 @@ jobs:
id: build
uses: ./.github/actions/do-build
with:
make-target: '${{ inputs.make-target }} ${{ inputs.make-arguments }}'
make-target: '${{ inputs.make-target }}'
platform: ${{ inputs.platform }}
debug-suffix: '${{ matrix.suffix }}'

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -31,9 +31,6 @@ on:
platform:
required: true
type: string
runs-on:
required: true
type: string
extra-conf-options:
required: false
type: string
@@ -48,17 +45,11 @@ on:
xcode-toolset-version:
required: true
type: string
configure-arguments:
required: false
type: string
make-arguments:
required: false
type: string
jobs:
build-macos:
name: build
runs-on: ${{ inputs.runs-on }}
runs-on: macos-11
strategy:
fail-fast: false
@@ -71,13 +62,13 @@ jobs:
steps:
- name: 'Checkout the JDK source'
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: 'Get the BootJDK'
id: bootjdk
uses: ./.github/actions/get-bootjdk
with:
platform: ${{ inputs.platform }}
platform: macos-x64
- name: 'Get JTReg'
id: jtreg
@@ -90,7 +81,7 @@ jobs:
- name: 'Install toolchain and dependencies'
run: |
# Run Homebrew installation and xcode-select
brew install autoconf make
brew install make
sudo xcode-select --switch /Applications/Xcode_${{ inputs.xcode-toolset-version }}.app/Contents/Developer
# This will make GNU make available as 'make' and not only as 'gmake'
echo '/usr/local/opt/make/libexec/gnubin' >> $GITHUB_PATH
@@ -104,9 +95,10 @@ jobs:
--with-boot-jdk=${{ steps.bootjdk.outputs.path }}
--with-jtreg=${{ steps.jtreg.outputs.path }}
--with-gtest=${{ steps.gtest.outputs.path }}
--enable-jtreg-failure-handler
--with-zlib=system
--with-jmod-compress=zip-1
${{ inputs.extra-conf-options }} ${{ inputs.configure-arguments }} || (
${{ inputs.extra-conf-options }} || (
echo "Dumping config.log:" &&
cat config.log &&
exit 1)
@@ -115,7 +107,7 @@ jobs:
id: build
uses: ./.github/actions/do-build
with:
make-target: '${{ inputs.make-target }} ${{ inputs.make-arguments }}'
make-target: '${{ inputs.make-target }}'
platform: ${{ inputs.platform }}
debug-suffix: '${{ matrix.suffix }}'

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -48,12 +48,6 @@ on:
msvc-toolset-architecture:
required: true
type: string
configure-arguments:
required: false
type: string
make-arguments:
required: false
type: string
env:
# These are needed to make the MSYS2 bash work properly
@@ -63,7 +57,7 @@ env:
jobs:
build-windows:
name: build
runs-on: windows-2025
runs-on: windows-2019
defaults:
run:
shell: bash
@@ -79,7 +73,7 @@ jobs:
steps:
- name: 'Checkout the JDK source'
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: 'Get MSYS2'
uses: ./.github/actions/get-msys2
@@ -98,26 +92,12 @@ jobs:
id: gtest
uses: ./.github/actions/get-gtest
- name: 'Check toolchain installed'
id: toolchain-check
run: |
set +e
'/c/Program Files/Microsoft Visual Studio/2022/Enterprise/vc/auxiliary/build/vcvars64.bat' -vcvars_ver=${{ inputs.msvc-toolset-version }}
if [ $? -eq 0 ]; then
echo "Toolchain is already installed"
echo "toolchain-installed=true" >> $GITHUB_OUTPUT
else
echo "Toolchain is not yet installed"
echo "toolchain-installed=false" >> $GITHUB_OUTPUT
fi
- name: 'Install toolchain and dependencies'
run: |
# Run Visual Studio Installer
'/c/Program Files (x86)/Microsoft Visual Studio/Installer/vs_installer.exe' \
modify --quiet --installPath 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise' \
modify --quiet --installPath 'C:/Program Files (x86)/Microsoft Visual Studio/2019/Enterprise' \
--add Microsoft.VisualStudio.Component.VC.${{ inputs.msvc-toolset-version }}.${{ inputs.msvc-toolset-architecture }}
if: steps.toolchain-check.outputs.toolchain-installed != 'true'
- name: 'Configure'
run: >
@@ -128,9 +108,10 @@ jobs:
--with-boot-jdk=${{ steps.bootjdk.outputs.path }}
--with-jtreg=${{ steps.jtreg.outputs.path }}
--with-gtest=${{ steps.gtest.outputs.path }}
--enable-jtreg-failure-handler
--with-msvc-toolset-version=${{ inputs.msvc-toolset-version }}
--with-jmod-compress=zip-1
${{ inputs.extra-conf-options }} ${{ inputs.configure-arguments }} || (
${{ inputs.extra-conf-options }} || (
echo "Dumping config.log:" &&
cat config.log &&
exit 1)
@@ -138,13 +119,12 @@ jobs:
# We need a minimal PATH on Windows
# Set PATH to "", so just GITHUB_PATH is included
PATH: ''
shell: env /usr/bin/bash --login -eo pipefail {0}
- name: 'Build'
id: build
uses: ./.github/actions/do-build
with:
make-target: '${{ inputs.make-target }} ${{ inputs.make-arguments }}'
make-target: '${{ inputs.make-target }}'
platform: ${{ inputs.platform }}
debug-suffix: '${{ matrix.suffix }}'

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -35,13 +35,7 @@ on:
platforms:
description: 'Platform(s) to execute on (comma separated, e.g. "linux-x64, macos, aarch64")'
required: true
default: 'linux-x64, linux-x86-hs, linux-x64-variants, linux-cross-compile, macos-x64, macos-aarch64, windows-x64, windows-aarch64, docs'
configure-arguments:
description: 'Additional configure arguments'
required: false
make-arguments:
description: 'Additional make arguments'
required: false
default: 'linux-x64, linux-x86, linux-x64-variants, linux-cross-compile, macos-x64, macos-aarch64, windows-x64, windows-aarch64'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -53,34 +47,21 @@ jobs:
### Determine platforms to include
###
prepare:
name: 'Prepare the run'
select:
name: 'Select platforms'
runs-on: ubuntu-22.04
outputs:
linux-x64: ${{ steps.include.outputs.linux-x64 }}
linux-x86-hs: ${{ steps.include.outputs.linux-x86-hs }}
linux-x86: ${{ steps.include.outputs.linux-x86 }}
linux-x64-variants: ${{ steps.include.outputs.linux-x64-variants }}
linux-cross-compile: ${{ steps.include.outputs.linux-cross-compile }}
macos-x64: ${{ steps.include.outputs.macos-x64 }}
macos-aarch64: ${{ steps.include.outputs.macos-aarch64 }}
windows-x64: ${{ steps.include.outputs.windows-x64 }}
windows-aarch64: ${{ steps.include.outputs.windows-aarch64 }}
docs: ${{ steps.include.outputs.docs }}
steps:
- name: 'Checkout the scripts'
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
make/conf/github-actions.conf
- name: 'Build JTReg'
id: jtreg
uses: ./.github/actions/build-jtreg
# TODO: Now that we are checking out the repo scripts, we can put the following code
# into a separate file
# This function must be inlined in main.yml, or we'd be forced to checkout the repo
- name: 'Check what jobs to run'
id: include
run: |
@@ -90,17 +71,19 @@ jobs:
# 'false' otherwise.
# arg $1: platform name or names to look for
function check_platform() {
if [[ '${{ !secrets.JDK_SUBMIT_FILTER || startsWith(github.ref, 'refs/heads/submit/') }}' == 'false' ]]; then
# If JDK_SUBMIT_FILTER is set, and this is not a "submit/" branch, don't run anything
echo 'false'
return
fi
if [[ $GITHUB_EVENT_NAME == workflow_dispatch ]]; then
input='${{ github.event.inputs.platforms }}'
elif [[ $GITHUB_EVENT_NAME == push ]]; then
if [[ '${{ !secrets.JDK_SUBMIT_FILTER || startsWith(github.ref, 'refs/heads/submit/') }}' == 'false' ]]; then
# If JDK_SUBMIT_FILTER is set, and this is not a "submit/" branch, don't run anything
>&2 echo 'JDK_SUBMIT_FILTER is set and not a "submit/" branch'
echo 'false'
return
else
input='${{ secrets.JDK_SUBMIT_PLATFORMS }}'
fi
else
echo 'Internal error in GHA'
exit 1
fi
normalized_input="$(echo ,$input, | tr -d ' ')"
@@ -121,15 +104,14 @@ jobs:
echo 'false'
}
echo "linux-x64=$(check_platform linux-x64 linux x64)" >> $GITHUB_OUTPUT
echo "linux-x86-hs=$(check_platform linux-x86-hs linux x86)" >> $GITHUB_OUTPUT
echo "linux-x64-variants=$(check_platform linux-x64-variants variants)" >> $GITHUB_OUTPUT
echo "linux-cross-compile=$(check_platform linux-cross-compile cross-compile)" >> $GITHUB_OUTPUT
echo "macos-x64=$(check_platform macos-x64 macos x64)" >> $GITHUB_OUTPUT
echo "macos-aarch64=$(check_platform macos-aarch64 macos aarch64)" >> $GITHUB_OUTPUT
echo "windows-x64=$(check_platform windows-x64 windows x64)" >> $GITHUB_OUTPUT
echo "windows-aarch64=$(check_platform windows-aarch64 windows aarch64)" >> $GITHUB_OUTPUT
echo "docs=$(check_platform docs)" >> $GITHUB_OUTPUT
echo "::set-output name=linux-x64::$(check_platform linux-x64 linux x64)"
echo "::set-output name=linux-x86::$(check_platform linux-x86 linux x86)"
echo "::set-output name=linux-x64-variants::$(check_platform linux-x64-variants variants)"
echo "::set-output name=linux-cross-compile::$(check_platform linux-cross-compile cross-compile)"
echo "::set-output name=macos-x64::$(check_platform macos-x64 macos x64)"
echo "::set-output name=macos-aarch64::$(check_platform macos-aarch64 macos aarch64)"
echo "::set-output name=windows-x64::$(check_platform windows-x64 windows x64)"
echo "::set-output name=windows-aarch64::$(check_platform windows-aarch64 windows aarch64)"
###
### Build jobs
@@ -137,78 +119,73 @@ jobs:
build-linux-x64:
name: linux-x64
needs: prepare
needs: select
uses: ./.github/workflows/build-linux.yml
with:
platform: linux-x64
gcc-major-version: '10'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.linux-x64 == 'true'
apt-gcc-version: '10.3.0-15ubuntu1'
# The linux-x64 jdk bundle is used as buildjdk for the cross-compile job
if: needs.select.outputs.linux-x64 == 'true' || needs.select.outputs.linux-cross-compile == 'true'
build-linux-x86-hs:
name: linux-x86-hs
needs: prepare
build-linux-x86:
name: linux-x86
needs: select
uses: ./.github/workflows/build-linux.yml
with:
platform: linux-x86
make-target: 'hotspot'
gcc-major-version: '10'
gcc-package-suffix: '-multilib'
apt-gcc-version: '10.3.0-15ubuntu1'
apt-architecture: 'i386'
# Some multilib libraries do not have proper inter-dependencies, so we have to
# install their dependencies manually.
apt-extra-packages: 'libfreetype-dev:i386 libtiff-dev:i386 libcupsimage2-dev:i386 libc6-i386 libgcc-s1:i386 libstdc++6:i386'
apt-extra-packages: 'libfreetype6-dev:i386 libtiff-dev:i386 libcupsimage2-dev:i386 libc6-i386'
extra-conf-options: '--with-target-bits=32'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.linux-x86-hs == 'true'
if: needs.select.outputs.linux-x86 == 'true'
build-linux-x64-hs-nopch:
name: linux-x64-hs-nopch
needs: prepare
needs: select
uses: ./.github/workflows/build-linux.yml
with:
platform: linux-x64
make-target: 'hotspot'
debug-levels: '[ "debug" ]'
gcc-major-version: '10'
apt-gcc-version: '10.3.0-15ubuntu1'
extra-conf-options: '--disable-precompiled-headers'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.linux-x64-variants == 'true'
if: needs.select.outputs.linux-x64-variants == 'true'
build-linux-x64-hs-zero:
name: linux-x64-hs-zero
needs: prepare
needs: select
uses: ./.github/workflows/build-linux.yml
with:
platform: linux-x64
make-target: 'hotspot'
debug-levels: '[ "debug" ]'
gcc-major-version: '10'
apt-gcc-version: '10.3.0-15ubuntu1'
extra-conf-options: '--with-jvm-variants=zero --disable-precompiled-headers'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.linux-x64-variants == 'true'
if: needs.select.outputs.linux-x64-variants == 'true'
build-linux-x64-hs-minimal:
name: linux-x64-hs-minimal
needs: prepare
needs: select
uses: ./.github/workflows/build-linux.yml
with:
platform: linux-x64
make-target: 'hotspot'
debug-levels: '[ "debug" ]'
gcc-major-version: '10'
apt-gcc-version: '10.3.0-15ubuntu1'
extra-conf-options: '--with-jvm-variants=minimal --disable-precompiled-headers'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.linux-x64-variants == 'true'
if: needs.select.outputs.linux-x64-variants == 'true'
build-linux-x64-hs-optimized:
name: linux-x64-hs-optimized
needs: prepare
needs: select
uses: ./.github/workflows/build-linux.yml
with:
platform: linux-x64
@@ -216,86 +193,62 @@ jobs:
# Technically this is not the "debug" level, but we can't inject a new matrix state for just this job
debug-levels: '[ "debug" ]'
gcc-major-version: '10'
apt-gcc-version: '10.3.0-15ubuntu1'
extra-conf-options: '--with-debug-level=optimized --disable-precompiled-headers'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.linux-x64-variants == 'true'
if: needs.select.outputs.linux-x64-variants == 'true'
build-linux-cross-compile:
name: linux-cross-compile
needs: prepare
needs:
- select
- build-linux-x64
uses: ./.github/workflows/build-cross-compile.yml
with:
gcc-major-version: '10'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.linux-cross-compile == 'true'
apt-gcc-version: '10.3.0-15ubuntu1'
apt-gcc-cross-version: '10.3.0-8ubuntu1cross1'
if: needs.select.outputs.linux-cross-compile == 'true'
build-macos-x64:
name: macos-x64
needs: prepare
needs: select
uses: ./.github/workflows/build-macos.yml
with:
platform: macos-x64
runs-on: 'macos-13'
xcode-toolset-version: '14.3.1'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.macos-x64 == 'true'
xcode-toolset-version: '11.7'
if: needs.select.outputs.macos-x64 == 'true'
build-macos-aarch64:
name: macos-aarch64
needs: prepare
needs: select
uses: ./.github/workflows/build-macos.yml
with:
platform: macos-aarch64
runs-on: 'macos-14'
xcode-toolset-version: '15.4'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.macos-aarch64 == 'true'
xcode-toolset-version: '12.4'
extra-conf-options: '--openjdk-target=aarch64-apple-darwin'
if: needs.select.outputs.macos-aarch64 == 'true'
build-windows-x64:
name: windows-x64
needs: prepare
needs: select
uses: ./.github/workflows/build-windows.yml
with:
platform: windows-x64
msvc-toolset-version: '14.44'
msvc-toolset-version: '14.29'
msvc-toolset-architecture: 'x86.x64'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.windows-x64 == 'true'
if: needs.select.outputs.windows-x64 == 'true'
build-windows-aarch64:
name: windows-aarch64
needs: prepare
needs: select
uses: ./.github/workflows/build-windows.yml
with:
platform: windows-aarch64
msvc-toolset-version: '14.44'
msvc-toolset-version: '14.29'
msvc-toolset-architecture: 'arm64'
make-target: 'hotspot'
extra-conf-options: '--openjdk-target=aarch64-unknown-cygwin'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.windows-aarch64 == 'true'
build-docs:
name: docs
needs: prepare
uses: ./.github/workflows/build-linux.yml
with:
platform: linux-x64
debug-levels: '[ "debug" ]'
make-target: 'docs-jdk-bundles'
# Make sure we never try to make full docs, since that would require a
# build JDK, and we do not need the additional testing of the graphs.
extra-conf-options: '--disable-full-docs'
gcc-major-version: '10'
configure-arguments: ${{ github.event.inputs.configure-arguments }}
make-arguments: ${{ github.event.inputs.make-arguments }}
if: needs.prepare.outputs.docs == 'true'
if: needs.select.outputs.windows-aarch64 == 'true'
###
### Test jobs
@@ -311,6 +264,16 @@ jobs:
bootjdk-platform: linux-x64
runs-on: ubuntu-22.04
test-linux-x86:
name: linux-x86
needs:
- build-linux-x86
uses: ./.github/workflows/test.yml
with:
platform: linux-x86
bootjdk-platform: linux-x64
runs-on: ubuntu-22.04
test-macos-x64:
name: macos-x64
needs:
@@ -319,19 +282,7 @@ jobs:
with:
platform: macos-x64
bootjdk-platform: macos-x64
runs-on: macos-13
xcode-toolset-version: '14.3.1'
test-macos-aarch64:
name: macos-aarch64
needs:
- build-macos-aarch64
uses: ./.github/workflows/test.yml
with:
platform: macos-aarch64
bootjdk-platform: macos-aarch64
runs-on: macos-14
xcode-toolset-version: '15.4'
runs-on: macos-11
test-windows-x64:
name: windows-x64
@@ -341,4 +292,51 @@ jobs:
with:
platform: windows-x64
bootjdk-platform: windows-x64
runs-on: windows-2025
runs-on: windows-2019
# Remove bundles so they are not misconstrued as binary distributions from the JDK project
remove-bundles:
name: 'Remove bundle artifacts'
runs-on: ubuntu-22.04
if: always()
needs:
- build-linux-x64
- build-linux-x86
- build-linux-x64-hs-nopch
- build-linux-x64-hs-zero
- build-linux-x64-hs-minimal
- build-linux-x64-hs-optimized
- build-linux-cross-compile
- build-macos-x64
- build-macos-aarch64
- build-windows-x64
- build-windows-aarch64
- test-linux-x64
- test-linux-x86
- test-macos-x64
- test-windows-x64
steps:
# Hack to get hold of the api environment variables that are only defined for actions
- name: 'Get API configuration'
id: api
uses: actions/github-script@v6
with:
script: 'return { url: process.env["ACTIONS_RUNTIME_URL"], token: process.env["ACTIONS_RUNTIME_TOKEN"] }'
- name: 'Remove bundle artifacts'
run: |
# Find and remove all bundle artifacts
ALL_ARTIFACT_URLS="$(curl -s \
-H 'Accept: application/json;api-version=6.0-preview' \
-H 'Authorization: Bearer ${{ fromJson(steps.api.outputs.result).token }}' \
'${{ fromJson(steps.api.outputs.result).url }}_apis/pipelines/workflows/${{ github.run_id }}/artifacts?api-version=6.0-preview')"
BUNDLE_ARTIFACT_URLS="$(echo "$ALL_ARTIFACT_URLS" | jq -r -c '.value | map(select(.name|startswith("bundles-"))) | .[].url')"
for url in $BUNDLE_ARTIFACT_URLS; do
echo "Removing $url"
curl -s \
-H 'Accept: application/json;api-version=6.0-preview' \
-H 'Authorization: Bearer ${{ fromJson(steps.api.outputs.result).token }}' \
-X DELETE "$url" \
|| echo "Failed to remove bundle"
done

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2022, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -37,9 +37,6 @@ on:
runs-on:
required: true
type: string
xcode-toolset-version:
required: false
type: string
env:
# These are needed to make the MSYS2 bash work properly
@@ -63,14 +60,10 @@ jobs:
- 'jdk/tier1 part 3'
- 'langtools/tier1'
- 'hs/tier1 common'
- 'hs/tier1 compiler part 1'
- 'hs/tier1 compiler part 2'
- 'hs/tier1 compiler part 3'
- 'hs/tier1 compiler not-xcomp'
- 'hs/tier1 compiler'
- 'hs/tier1 gc'
- 'hs/tier1 runtime'
- 'hs/tier1 serviceability'
- 'lib-test/tier1'
include:
- test-name: 'jdk/tier1 part 1'
@@ -89,20 +82,8 @@ jobs:
test-suite: 'test/hotspot/jtreg/:tier1_common'
debug-suffix: -debug
- test-name: 'hs/tier1 compiler part 1'
test-suite: 'test/hotspot/jtreg/:tier1_compiler_1'
debug-suffix: -debug
- test-name: 'hs/tier1 compiler part 2'
test-suite: 'test/hotspot/jtreg/:tier1_compiler_2'
debug-suffix: -debug
- test-name: 'hs/tier1 compiler part 3'
test-suite: 'test/hotspot/jtreg/:tier1_compiler_3'
debug-suffix: -debug
- test-name: 'hs/tier1 compiler not-xcomp'
test-suite: 'test/hotspot/jtreg/:tier1_compiler_not_xcomp'
- test-name: 'hs/tier1 compiler'
test-suite: 'test/hotspot/jtreg/:tier1_compiler'
debug-suffix: -debug
- test-name: 'hs/tier1 gc'
@@ -117,13 +98,9 @@ jobs:
test-suite: 'test/hotspot/jtreg/:tier1_serviceability'
debug-suffix: -debug
- test-name: 'lib-test/tier1'
test-suite: 'test/lib-test/:tier1'
debug-suffix: -debug
steps:
- name: 'Checkout the JDK source'
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: 'Get MSYS2'
uses: ./.github/actions/get-msys2
@@ -150,7 +127,7 @@ jobs:
run: |
# On macOS we need to install some dependencies for testing
brew install make
sudo xcode-select --switch /Applications/Xcode_${{ inputs.xcode-toolset-version }}.app/Contents/Developer
sudo xcode-select --switch /Applications/Xcode_11.7.app/Contents/Developer
# This will make GNU make available as 'make' and not only as 'gmake'
echo '/usr/local/opt/make/libexec/gnubin' >> $GITHUB_PATH
if: runner.os == 'macOS'
@@ -161,9 +138,9 @@ jobs:
# We need a minimal PATH on Windows
# Set PATH to "", so just GITHUB_PATH is included
if [[ '${{ runner.os }}' == 'Windows' ]]; then
echo "value=" >> $GITHUB_OUTPUT
echo "::set-output name=value::"
else
echo "value=$PATH" >> $GITHUB_OUTPUT
echo "::set-output name=value::$PATH"
fi
- name: 'Run tests'
@@ -177,7 +154,7 @@ jobs:
SYMBOLS_IMAGE_DIR=${{ steps.bundles.outputs.symbols-path }}
TEST_IMAGE_DIR=${{ steps.bundles.outputs.tests-path }}
JTREG='JAVA_OPTIONS=-XX:-CreateCoredumpOnCrash;VERBOSE=fail,error,time;KEYWORDS=!headful'
&& bash ./.github/scripts/gen-test-summary.sh "$GITHUB_STEP_SUMMARY" "$GITHUB_OUTPUT"
&& bash ./.github/scripts/gen-test-summary.sh "$GITHUB_STEP_SUMMARY"
env:
PATH: ${{ steps.path.outputs.value }}
@@ -210,11 +187,11 @@ jobs:
fi
artifact_name="results-${{ inputs.platform }}-$(echo ${{ matrix.test-name }} | tr '/ ' '__')"
echo "artifact-name=$artifact_name" >> $GITHUB_OUTPUT
echo "::set-output name=artifact-name::$artifact_name"
if: always()
- name: 'Upload test results'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
path: results
name: ${{ steps.package.outputs.artifact-name }}
@@ -222,7 +199,7 @@ jobs:
# This is the best way I found to abort the job with an error message
- name: 'Notify about test failures'
uses: actions/github-script@v7
uses: actions/github-script@v6
with:
script: core.setFailed('${{ steps.run-tests.outputs.error-message }}')
if: steps.run-tests.outputs.failure == 'true'

.gitignore
View File

@@ -18,8 +18,3 @@ NashornProfile.txt
/src/utils/LogCompilation/target/
/.project/
/.settings/
/.project
/.classpath
/.cproject
/compile_commands.json
/.cache

View File

@@ -1,11 +1,10 @@
[general]
project=jdk-updates
project=jdk
jbs=JDK
version=21.0.9
version=20
[checks]
error=author,committer,reviewers,merge,issues,executable,symlink,message,hg-tag,whitespace,problemlists
warning=issuestitle,binary
[repository]
tags=(?:jdk-(?:[1-9]([0-9]*)(?:\.(?:0|[1-9][0-9]*)){0,4})(?:\+(?:(?:[0-9]+))|(?:-ga)))|(?:jdk[4-9](?:u\d{1,3})?-(?:(?:b\d{2,3})|(?:ga)))|(?:hs\d\d(?:\.\d{1,2})?-b\d\d)
@@ -16,7 +15,7 @@ version=0
domain=openjdk.org
[checks "whitespace"]
files=.*\.cpp|.*\.hpp|.*\.c|.*\.h|.*\.java|.*\.cc|.*\.hh|.*\.m|.*\.mm|.*\.md|.*\.gmk|.*\.m4|.*\.ac|Makefile
files=.*\.cpp|.*\.hpp|.*\.c|.*\.h|.*\.java|.*\.cc|.*\.hh|.*\.m|.*\.mm|.*\.gmk|.*\.m4|.*\.ac|Makefile
ignore-tabs=.*\.gmk|Makefile
[checks "merge"]

View File

@@ -2,8 +2,8 @@
OPENJDK ASSEMBLY EXCEPTION
The OpenJDK source code made available by Oracle America, Inc. (Oracle) at
openjdk.org ("OpenJDK Code") is distributed under the terms of the GNU
General Public License <https://www.gnu.org/copyleft/gpl.html> version 2
openjdk.java.net ("OpenJDK Code") is distributed under the terms of the GNU
General Public License <http://www.gnu.org/copyleft/gpl.html> version 2
only ("GPL2"), with the following clarification and special exception.
Linking this OpenJDK Code statically or dynamically with other code
@@ -12,7 +12,7 @@ only ("GPL2"), with the following clarification and special exception.
As a special exception, Oracle gives you permission to link this
OpenJDK Code with certain code licensed by Oracle as indicated at
https://openjdk.org/legal/exception-modules-2007-05-08.html
http://openjdk.java.net/legal/exception-modules-2007-05-08.html
("Designated Exception Modules") to produce an executable,
regardless of the license terms of the Designated Exception Modules,
and to copy and distribute the resulting executable under GPL2,

View File

@@ -1,3 +1,3 @@
# Contributing to the JDK
Please see <https://openjdk.org/contribute> for how to contribute.
Please see <https://openjdk.java.net/contribute/> for how to contribute.

View File

@@ -1,12 +1,58 @@
# Welcome to OpenJDK 21 Updates!
# Welcome to the JDK!
The JDK 21 Updates project uses two GitHub repositories.
Updates are continuously developed in the repository [jdk21u-dev](https://github.com/openjdk/jdk21u-dev). This is the repository usually targeted by contributors.
The [jdk21u](https://github.com/openjdk/jdk21u) repository is used for rampdown of the update releases of jdk21u and only accepts critical changes that must make the next release during rampdown. (You probably do not want to target jdk21u).
## Wakefield
This is a temporary section created to host information on the
[Wakefield](https://wiki.openjdk.java.net/display/wakefield) project.
For more OpenJDK 21 updates specific information such as timelines and contribution guidelines see the [project wiki page](https://wiki.openjdk.org/display/JDKUpdates/JDK+21u/).
### Building
There are two additional `configure` arguments:
```
--with-wayland specify prefix directory for the wayland package
(expecting the headers under PATH/include)
--with-wayland-include specify directory for the wayland include files
```
As usual, there should be no need to specify those explicitly unless you're doing
something tricky.
However, a variant of `libwayland-dev` needs to be installed on the build system.
### Running
Make sure your system is configured such that `libwayland` can find the socket to connect to;
usually this means that the environment variable `WAYLAND_DISPLAY` is set to something
sensible. Then add this argument to `java`
```
-Dawt.toolkit.name=WLToolkit
```
### Testing
Testing that involves `Robot` is done inside a [Weston](https://gitlab.freedesktop.org/wayland/weston/)
instance with a special module loaded called `libwakefield`
that provides the necessary functionality. The Wayland-specific tests are therefore executed with a dedicated test driver
`test/jdk/java/awt/wakefield/WakefieldTestDriver.java`. The driver also provides an easy
way to run the test in several configurations with a different size and even number
of "outputs" (monitors).
To run the Wayland-specific tests, perform these steps:
* Install Weston version 9 (earlier versions are known NOT to work).
* Obtain `libwakefield.so` either by building from source (available under
`src/java.desktop/share/native/libwakefield` and not integrated into the rest of the
build infrastructure; see `README.md` there)
or by fetching the latest pre-built `x64` binary
```
wget https://github.com/mkartashev/wakefield/raw/main/libwakefield.so
```
* Set `LIBWAKEFIELD` environment variable to the full path to `libwakefield.so`
```
export LIBWAKEFIELD=/tmp/wakefield-testing/libwakefield.so
```
* Run `jtreg` like so
```
jtreg -e:XDG_RUNTIME_DIR -e:LIBWAKEFIELD -testjdk:... test/jdk/java/awt/wakefield/
```
This was verified to work in `Ubuntu 21.10`.
This does NOT work in `Ubuntu 21.04` or `Fedora 34`.
## Generic Info (not Wakefield-specific)
For build instructions please see the
[online documentation](https://openjdk.org/groups/build/doc/building.html),
or either of these files:
@@ -14,6 +60,5 @@ or either of these files:
- [doc/building.html](doc/building.html) (html version)
- [doc/building.md](doc/building.md) (markdown version)
See <https://openjdk.org/> for more information about the OpenJDK
Community and the JDK and see <https://bugs.openjdk.org> for JDK issue
tracking.
See <https://openjdk.org/> for more information about
the OpenJDK Community and the JDK.

View File

@@ -1,3 +0,0 @@
# JDK Vulnerabilities
Please follow the process outlined in the [OpenJDK Vulnerability Policy](https://openjdk.org/groups/vulnerability/report) to disclose vulnerabilities in the JDK.

View File

@@ -193,7 +193,17 @@ for root in $MODULE_ROOTS; do
root=`wslpath -am $root`
fi
VM_CI="jdk.internal.vm.ci/share/classes"
VM_COMPILER="src/jdk.internal.vm.compiler/share/classes"
if test "${root#*$VM_CI}" != "$root" || test "${root#*$VM_COMPILER}" != "$root"; then
for subdir in "$root"/*; do
if [ -d "$subdir" ]; then
SOURCES=$SOURCES" $SOURCE_PREFIX""$subdir"/src"$SOURCE_POSTFIX"
fi
done
else
SOURCES=$SOURCES" $SOURCE_PREFIX""$root""$SOURCE_POSTFIX"
fi
done
add_replacement "###SOURCE_ROOTS###" "$SOURCES"

View File

@@ -1,6 +1,6 @@
#!/bin/bash
#
# Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -128,15 +128,6 @@ install_jib() {
exit 1
fi
fi
# Want to check the filetype using file, to see if we got served a HTML error page.
# This is sensitive to the filename containing a specific string, but good enough.
file "${installed_jib_script}.gz" | grep "gzip compressed data" > /dev/null
if [ $? -ne 0 ]; then
echo "Warning: ${installed_jib_script}.gz is not a gzip file."
echo "If you are behind a proxy you may need to configure exceptions using no_proxy."
echo "The download URL was: ${jib_url}"
exit 1
fi
echo "Extracting JIB bootstrap script"
rm -f "${installed_jib_script}"
gunzip "${installed_jib_script}.gz"
@@ -144,28 +135,6 @@ install_jib() {
echo "${data_string}" > "${install_data}"
}
# Returns a shell-escaped version of the argument given.
shell_quote() {
if [[ -n "$1" ]]; then
# Uses only shell-safe characters? No quoting needed.
# '=' is a zsh meta-character, but only in word-initial position.
if echo "$1" | grep '^[ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789\.:,%/+=_-]\{1,\}$' > /dev/null \
&& ! echo "$1" | grep '^=' > /dev/null; then
quoted="$1"
else
if echo "$1" | grep "[\'!]" > /dev/null; then
# csh does history expansion within single quotes, but not
# when backslash-escaped!
local quoted_quote="'\\''" quoted_exclam="'\\!'"
word="${1//\'/${quoted_quote}}"
word="${1//\!/${quoted_exclam}}"
fi
quoted="'$1'"
fi
echo "$quoted"
fi
}
# Main body starts here
setup_url
@@ -182,16 +151,4 @@ if [ -z "${JIB_SRC_DIR}" ]; then
export JIB_SRC_DIR="${mydir}/../"
fi
# Save the original command line
conf_quoted_arguments=()
for conf_option; do
conf_quoted_arguments=("${conf_quoted_arguments[@]}" "$(shell_quote "$conf_option")")
done
export REAL_CONFIGURE_COMMAND_LINE="${conf_quoted_arguments[@]}"
myfulldir="$(cd "${mydir}" > /dev/null && pwd)"
export REAL_CONFIGURE_COMMAND_EXEC_FULL="$BASH $myfulldir/$myname"
export REAL_CONFIGURE_COMMAND_EXEC_SHORT="$myname"
${installed_jib_script} "$@"

File diff suppressed because it is too large

View File

@@ -126,8 +126,6 @@ space is required.
Even for 32-bit builds, it is recommended to use a 64-bit build machine, and
instead create a 32-bit target using `--with-target-bits=32`.
Note: The Windows 32-bit x86 port is deprecated and may be removed in a future release.
### Building on aarch64
At a minimum, a machine with 8 cores is advisable, as well as 8 GB of RAM.
@@ -164,11 +162,11 @@ This table lists the OS versions used by Oracle when building the JDK. Such
information is always subject to change, but this table is up to date at the
time of writing.
| Operating system | Vendor/version used |
| ----------------- | ---------------------------------- |
| Linux | Oracle Enterprise Linux 6.4 / 7.6 |
| macOS | Mac OS X 10.13 (High Sierra) |
| Windows | Windows Server 2012 R2 |
Operating system Vendor/version used
----------------- -------------------------------------------------------
Linux Oracle Enterprise Linux 6.4 / 7.6
macOS Mac OS X 10.13 (High Sierra)
Windows Windows Server 2012 R2
The double version numbers for Linux are due to the hybrid model
used at Oracle, where header files and external libraries from an older version
@@ -201,8 +199,6 @@ rule also applies to input to the build system, e.g. in arguments to
`--with-msvcr-dll=c:\msvcr100.dll`. For details on this conversion, see the section
on [Fixpath](#fixpath).
Note: The Windows 32-bit x86 port is deprecated and may be removed in a future release.
#### Cygwin
A functioning [Cygwin](http://www.cygwin.com/) environment is required for
@@ -315,17 +311,14 @@ Build Wiki page for details about which versions of AIX are supported.
Large portions of the JDK consists of native code, that needs to be compiled to
be able to run on the target platform. In theory, toolchain and operating
system should be independent factors, but in practice there's more or less a
one-to-one correlation between target operating system and toolchain. There are
ongoing efforts to loosen this strict coupling between compiler and operating
system (see [JDK-8288293](https://bugs.openjdk.org/browse/JDK-8288293)) but it
will likely be a very long time before this goal can be realized.
one-to-one correlation between target operating system and toolchain.
| Operating system | Supported toolchain |
| ------------------ | ------------------------- |
| Linux | gcc, clang |
| macOS | Apple Xcode (using clang) |
| AIX | IBM XL C/C++ |
| Windows | Microsoft Visual Studio |
Operating system Supported toolchain
------------------ -------------------------
Linux gcc, clang
macOS Apple Xcode (using clang)
AIX IBM XL C/C++
Windows Microsoft Visual Studio
Please see the individual sections on the toolchains for version
recommendations. As a reference, these versions of the toolchains are used, at
@@ -334,11 +327,11 @@ possible to compile the JDK with both older and newer versions, but the closer
you stay to this list, the more likely you are to compile successfully without
issues.
| Operating system | Toolchain version |
| ------------------ | ------------------------------------------ |
| Linux | gcc 11.2.0 |
| macOS | Apple Xcode 10.1 (using clang 10.0.0) |
| Windows | Microsoft Visual Studio 2022 update 17.1.0 |
Operating system Toolchain version
------------------ -------------------------------------------------------
Linux gcc 11.2.0
macOS Apple Xcode 10.1 (using clang 10.0.0)
Windows Microsoft Visual Studio 2022 update 17.1.0
All compilers are expected to be able to compile to the C99 language standard,
as some C99 features are used in the source code. Microsoft Visual Studio
@@ -366,17 +359,20 @@ To use clang instead of gcc on Linux, use `--with-toolchain-type=clang`.
The oldest supported version of Xcode is 8.
You will need to download Xcode either from the App Store or specific versions
can be easily located via the [Xcode Releases](https://xcodereleases.com)
website.
You will need the Xcode command lines developers tools to be able to build
the JDK. (Actually, *only* the command lines tools are needed, not the IDE.)
The simplest way to install these is to run:
```
xcode-select --install
```
When updating Xcode, it is advisable to keep an older version for building the JDK.
To use a specific version of Xcode you have multiple options:
* Use `xcode-select -s` before running `configure`, e.g. `xcode-select -s /Applications/Xcode13.1.app`. The drawback is that the setting
is system wide and you may have to revert it after an OpenJDK build.
* Use configure option `--with-xcode-path`, e.g. `configure --with-xcode-path=/Applications/Xcode13.1.app`
This allows using a specific Xcode version for an OpenJDK build, independently of the active Xcode version by `xcode-select`.
It is advisable to keep an older version of Xcode for building the JDK when
updating Xcode. This [blog page](
http://iosdevelopertips.com/xcode/install-multiple-versions-of-xcode.html) has
good suggestions on managing multiple Xcode versions. To use a specific version
of Xcode, use `xcode-select -s` before running `configure`, or use
`--with-toolchain-path` to point to the version of Xcode to use, e.g.
`configure --with-toolchain-path=/Applications/Xcode8.app/Contents/Developer/usr/bin`
If you have recently (inadvertently) updated your OS and/or Xcode version, and
the JDK can no longer be built, please see the section on [Problems with the
@@ -472,19 +468,6 @@ rather than bundling the JDK's own copy.
Use `--with-freetype-include=<path>` and `--with-freetype-lib=<path>`
if `configure` does not automatically locate the platform FreeType files.
### Fontconfig
Fontconfig from [freedesktop.org Fontconfig](http://fontconfig.org) is required
on all platforms except Windows and macOS.
* To install on an apt-based Linux, try running `sudo apt-get install
libfontconfig-dev`.
* To install on an rpm-based Linux, try running `sudo yum install
fontconfig-devel`.
Use `--with-fontconfig-include=<path>` and `--with-fontconfig=<path>`
if `configure` does not automatically locate the platform Fontconfig files.
### CUPS
CUPS, [Common UNIX Printing System](http://www.cups.org) header files are
@@ -874,18 +857,17 @@ containing `lib/jtreg.jar` etc.
The [Adoption Group](https://wiki.openjdk.org/display/Adoption) provides
recent builds of jtreg [here](
https://ci.adoptium.net/view/Dependencies/job/dependency_pipeline/lastSuccessfulBuild/artifact/jtreg/).
https://ci.adoptopenjdk.net/view/Dependencies/job/dependency_pipeline/lastSuccessfulBuild/artifact/jtreg/).
Download the latest `.tar.gz` file, unpack it, and point `--with-jtreg` to the
`jtreg` directory that you just unpacked.
Building of Hotspot Gtest suite requires the source code of Google
Test framework. The top directory, which contains both `googletest`
and `googlemock` directories, should be specified via `--with-gtest`.
The minimum supported version of Google Test is 1.14.0, whose source
code can be obtained:
Building of Hotspot Gtest suite requires the source code of Google Test framework.
The top directory, which contains both `googletest` and `googlemock`
directories, should be specified via `--with-gtest`.
The supported version of Google Test is 1.8.1, whose source code can be obtained:
* by downloading and unpacking the source bundle from [here](https://github.com/google/googletest/releases/tag/v1.14.0)
* or by checking out `v1.14.0` tag of `googletest` project: `git clone -b v1.14.0 https://github.com/google/googletest`
* by downloading and unpacking the source bundle from [here](https://github.com/google/googletest/releases/tag/release-1.8.1)
* or by checking out `release-1.8.1` tag of `googletest` project: `git clone -b release-1.8.1 https://github.com/google/googletest`
To execute the most basic tests (tier 1), use:
```
@@ -984,14 +966,14 @@ https://sourceware.org/autobook/autobook/autobook_17.html). If no
targets are given, a native toolchain for the current platform will be
created. Currently, at least the following targets are known to work:
| Supported devkit targets |
| ------------------------ |
| x86_64-linux-gnu |
| aarch64-linux-gnu |
| arm-linux-gnueabihf |
| ppc64-linux-gnu |
| ppc64le-linux-gnu |
| s390x-linux-gnu |
Supported devkit targets
-------------------------
x86_64-linux-gnu
aarch64-linux-gnu
arm-linux-gnueabihf
ppc64-linux-gnu
ppc64le-linux-gnu
s390x-linux-gnu
`BASE_OS` must be one of "OEL6" for Oracle Enterprise Linux 6 or
"Fedora" (if not specified "OEL6" will be the default). If the base OS
@@ -1161,7 +1143,7 @@ Note that X11 is needed even if you only want to build a headless JDK.
### Cross compiling with Debian sysroots
Fortunately, you can create sysroots for foreign architectures with tools
provided by your OS. On Debian/Ubuntu systems, one could use `debootstrap` to
provided by your OS. On Debian/Ubuntu systems, one could use `qemu-deboostrap` to
create the *target* system chroot, which would have the native libraries and headers
specific to that *target* system. After that, we can use the cross-compiler on the *build*
system, pointing into chroot to get the build dependencies right. This allows building
@@ -1176,7 +1158,7 @@ For example, cross-compiling to AArch64 from x86_64 could be done like this:
* Create chroot on the *build* system, configuring it for *target* system:
```
sudo debootstrap \
sudo qemu-debootstrap \
--arch=arm64 \
--verbose \
--include=fakeroot,symlinks,build-essential,libx11-dev,libxext-dev,libxrender-dev,libxrandr-dev,libxtst-dev,libxt-dev,libcups2-dev,libfontconfig1-dev,libasound2-dev,libfreetype6-dev,libpng-dev,libffi-dev \
@@ -1184,8 +1166,6 @@ For example, cross-compiling to AArch64 from x86_64 could be done like this:
buster \
~/sysroot-arm64 \
http://httpredir.debian.org/debian/
# If the target architecture is `riscv64`,
# the path should be `debian-ports` instead of `debian`.
```
* Make sure the symlinks inside the newly created chroot point to proper locations:
@@ -1218,22 +1198,21 @@ it might require a little nudge with:
Architectures that are known to successfully cross-compile like this are:
| Target | Debian tree | Debian arch | `--openjdk-target=...` | `--with-jvm-variants=...` |
| ------------ | ------------ | ------------- | ------------------------ | ------------------------- |
| x86 | buster | i386 | i386-linux-gnu | (all) |
| arm | buster | armhf | arm-linux-gnueabihf | (all) |
| aarch64 | buster | arm64 | aarch64-linux-gnu | (all) |
| ppc64le | buster | ppc64el | powerpc64le-linux-gnu | (all) |
| s390x | buster | s390x | s390x-linux-gnu | (all) |
| mipsle | buster | mipsel | mipsel-linux-gnu | zero |
| mips64le | buster | mips64el | mips64el-linux-gnueabi64 | zero |
| armel | buster | arm | arm-linux-gnueabi | zero |
| ppc | sid | powerpc | powerpc-linux-gnu | zero |
| ppc64be | sid | ppc64 | powerpc64-linux-gnu | (all) |
| m68k | sid | m68k | m68k-linux-gnu | zero |
| alpha | sid | alpha | alpha-linux-gnu | zero |
| sh4 | sid | sh4 | sh4-linux-gnu | zero |
| riscv64 | sid | riscv64 | riscv64-linux-gnu | (all) |
Target Debian tree Debian arch `--openjdk-target=...` `--with-jvm-variants=...`
------------ ------------ ------------- ------------------------ --------------
x86 buster i386 i386-linux-gnu (all)
arm buster armhf arm-linux-gnueabihf (all)
aarch64 buster arm64 aarch64-linux-gnu (all)
ppc64le buster ppc64el powerpc64le-linux-gnu (all)
s390x buster s390x s390x-linux-gnu (all)
mipsle buster mipsel mipsel-linux-gnu zero
mips64le buster mips64el mips64el-linux-gnueabi64 zero
armel buster arm arm-linux-gnueabi zero
ppc sid powerpc powerpc-linux-gnu zero
ppc64be sid ppc64 powerpc64-linux-gnu (all)
m68k sid m68k m68k-linux-gnu zero
alpha sid alpha alpha-linux-gnu zero
sh4 sid sh4 sh4-linux-gnu zero
### Building for ARM/aarch64
@@ -1243,44 +1222,6 @@ available using `--with-abi-profile`: arm-vfp-sflt, arm-vfp-hflt, arm-sflt,
armv5-vfp-sflt, armv6-vfp-hflt. Note that soft-float ABIs are no longer
properly supported by the JDK.
### Building for RISC-V
The RISC-V community provides a basic
[GNU compiler toolchain](https://github.com/riscv-collab/riscv-gnu-toolchain),
but the [external libraries](#External-Library-Requirements) required by OpenJDK
complicate the building process. The placeholder `<toolchain-installed-path>`
shown below is the path where you want to install the toolchain.
* Install the RISC-V GNU compiler toolchain:
```
git clone --recursive https://github.com/riscv-collab/riscv-gnu-toolchain
cd riscv-gnu-toolchain
./configure --prefix=<toolchain-installed-path>
make linux
export PATH=<toolchain-installed-path>/bin:$PATH
```
* Cross-compile all the required libraries:
```
# An example for libffi
git clone https://github.com/libffi/libffi
cd libffi
./configure --host=riscv64-unknown-linux-gnu --prefix=<toolchain-installed-path>/sysroot/usr
make
make install
```
* Configure and build OpenJDK:
```
bash configure \
--with-boot-jdk=$BOOT_JDK \
--openjdk-target=riscv64-linux-gnu \
--with-sysroot=<toolchain-installed-path>/sysroot \
--with-toolchain-path=<toolchain-installed-path>/bin \
--with-extra-path=<toolchain-installed-path>/bin
make images
```
### Building for musl
Just like it's possible to cross-compile for a different CPU, it's possible to
@@ -1389,12 +1330,12 @@ it.
To use, setup an icecc network, and install icecc on the build machine. Then
run `configure` using `--enable-icecc`.
### Using the javac server
### Using sjavac
To speed up compilation of Java code, especially during incremental
compilations, the javac server is automatically enabled in the configuration
step by default. To explicitly enable or disable the javac server, use either
`--enable-javac-server` or `--disable-javac-server`.
To speed up compilation of Java code, especially during incremental compilations,
the sjavac server is automatically enabled in the configuration step by default.
To explicitly enable or disable sjavac, use either `--enable-javac-server`
or `--disable-javac-server`.
### Building the Right Target

File diff suppressed because it is too large

View File

@@ -572,12 +572,8 @@ There are a few exceptions to this rule.
* `#include <new>` to use placement `new`, `std::nothrow`, and `std::nothrow_t`.
* `#include <limits>` to use `std::numeric_limits`.
* `#include <type_traits>` with some restrictions, listed below.
* `#include <cstddef>` to use `std::nullptr_t` and `std::max_align_t`.
Certain restrictions apply to the declarations provided by `<type_traits>`.
* The `alignof` operator should be used rather than `std::alignment_of<>`.
* `#include <type_traits>`.
* `#include <cstddef>` to use `std::nullptr_t`.
TODO: Rather than directly \#including (permitted) Standard Library
headers, use a convention of \#including wrapper headers (in some
@@ -655,51 +651,6 @@ constant members. Compilers having such bugs are no longer supported.
Except where an enum is semantically appropriate, new code should use
integral constants.
### alignas
_Alignment-specifiers_ (`alignas`
[n2341](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2341.pdf))
are permitted, with restrictions.
_Alignment-specifiers_ are permitted when the requested alignment is a
_fundamental alignment_ (not greater than `alignof(std::max_align_t)`
[C++14 3.11/2](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2014/n4296.pdf)).
_Alignment-specifiers_ with an _extended alignment_ (greater than
`alignof(std::max_align_t)`
[C++14 3.11/3](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2014/n4296.pdf))
may only be used to align variables with static or automatic storage duration
([C++14 3.7.1, 3.7.3](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2014/n4296.pdf)).
As a consequence, _over-aligned types_ are forbidden; this may change if
HotSpot updates to using C++17 or later
([p0035r4](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2016/p0035r4.html)).
Large _extended alignments_ should be avoided, particularly for stack
allocated objects. What is a large value may depend on the platform and
configuration. There may also be hard limits for some platforms.
An _alignment-specifier_ must always be applied to a definition
([C++14 10.6.2/6](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2014/n4296.pdf)).
(C++ allows an _alignment-specifier_ to optionally also be applied to a
declaration, so long as the definition has equivalent alignment. There isn't
any known benefit from duplicating the alignment in a non-definition
declaration, so such duplication should be avoided in HotSpot code.)
Enumerations are forbidden from having _alignment-specifiers_. Aligned
enumerations were originally permitted but insufficiently specified, and were
later (C++20) removed
([CWG 2354](https://cplusplus.github.io/CWG/issues/2354.html)).
Permitting such usage in HotSpot now would just cause problems in the future.
_Alignment-specifiers_ are forbidden in `typedef` and _alias-declarations_.
This may work or may have worked in some versions of some compilers, but was
later (C++14) explicitly disallowed
([CWG 1437](https://cplusplus.github.io/CWG/issues/1437.html)).
The HotSpot macro `ATTRIBUTE_ALIGNED` provides similar capabilities for
platforms that define it. This macro predates the use by HotSpot of C++
versions providing `alignas`. New code should use `alignas`.
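A minimal sketch (not taken from HotSpot sources) of what the rules above permit and forbid:
```
// Illustrative only: alignas with a fundamental alignment, and an extended
// alignment on a variable with automatic storage duration, are permitted.
#include <cstddef>   // std::max_align_t

struct TwoWords {
  void* a;
  void* b;
};

void alignment_sketch() {
  // Fundamental alignment: not greater than alignof(std::max_align_t).
  alignas(alignof(std::max_align_t)) TwoWords slot = {};

  // Extended alignment on an automatic variable, e.g. one cache line.
  alignas(64) char line_buffer[64] = {};

  (void)slot;
  (void)line_buffer;
}

// Forbidden by the rules above: an over-aligned type ...
//   struct alignas(64) OverAligned { char c; };
// ... an aligned enumeration, or an alignment-specifier in an
// alias-declaration such as:
//   using AlignedWords = alignas(16) TwoWords;
```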
### thread_local
Avoid use of `thread_local`
@@ -1058,37 +1009,8 @@ and other supported compilers may not have anything similar.
[p0136r1]: http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2015/p0136r1.html
"p0136r1"
### Attributes
The use of some attributes
([n2761](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2761.pdf))
(listed below) is permitted. (Note that some of the attributes defined in
that paper didn't make it into the final specification.)
Attributes are syntactically permitted in a broad set of locations, but
specific attributes are only permitted in a subset of those locations. In
some cases an attribute that appertains to a given element may be placed in
any of several locations with the same meaning. In those cases HotSpot has a
preferred location.
* An attribute that appertains to a function is placed at the beginning of the
function's declaration, rather than between the function name and the parameter
list.
Only the following attributes are permitted:
* `[[noreturn]]`
The following attributes are expressly forbidden:
* `[[carries_dependency]]` - Related to `memory_order_consume`.
* `[[deprecated]]` - Not relevant in HotSpot code.
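For illustration (a sketch, not HotSpot code), the single permitted attribute and its preferred placement look like this:
```
#include <cstdlib>

// Preferred: the attribute appears at the beginning of the declaration,
// not between the function name and the parameter list.
[[noreturn]] void report_fatal_error(const char* msg);

[[noreturn]] void report_fatal_error(const char* msg) {
  // A real VM function would route 'msg' through HotSpot error reporting.
  (void)msg;
  std::abort();
}
```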
### Additional Permitted Features
* `alignof`
([n2341](https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2341.pdf))
* `constexpr`
([n2235](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2235.pdf))
([n3652](https://isocpp.org/files/papers/N3652.html))
@@ -1186,6 +1108,10 @@ difficult to deal with and lead to surprises, as can destruction
ordering. HotSpot doesn't generally try to cleanup on exit, and
running destructors at exit can also lead to problems.
* `[[deprecated]]` attribute
([n3760](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3760.html)) &mdash;
Not relevant in HotSpot code.
* Avoid most operator overloading, preferring named functions. When
operator overloading is used, ensure the semantics conform to the
normal expected behavior of the operation.
@@ -1210,6 +1136,9 @@ features that have not yet been discussed.
* Member initializers and aggregates
([n3653](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3653.html))
* `[[noreturn]]` attribute
([n2761](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2761.pdf))
* Rvalue references and move semantics
[ADL]: https://en.cppreference.com/w/cpp/language/adl

View File

@@ -5,19 +5,11 @@
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
<title>Native/Unit Test Development Guidelines</title>
<style>
<style type="text/css">
code{white-space: pre-wrap;}
span.smallcaps{font-variant: small-caps;}
div.columns{display: flex; gap: min(4vw, 1.5em);}
div.column{flex: auto; overflow-x: auto;}
div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
ul.task-list{list-style: none;}
ul.task-list li input[type="checkbox"] {
width: 0.8em;
margin: 0 0.8em 0.2em -1.6em;
vertical-align: middle;
}
.display.math{display: block; text-align: center; margin: 0.5rem auto;}
span.underline{text-decoration: underline;}
div.column{display: inline-block; vertical-align: top; width: 50%;}
</style>
<link rel="stylesheet" href="../make/data/docs-resources/resources/jdk-default.css" />
<!--[if lt IE 9]>
@@ -28,442 +20,174 @@
<header id="title-block-header">
<h1 class="title">Native/Unit Test Development Guidelines</h1>
</header>
<nav id="TOC" role="doc-toc">
<nav id="TOC">
<ul>
<li><a href="#good-test-properties" id="toc-good-test-properties">Good
test properties</a>
<ul>
<li><a href="#lightness" id="toc-lightness">Lightness</a></li>
<li><a href="#isolation" id="toc-isolation">Isolation</a></li>
<li><a href="#atomicity-and-self-containment"
id="toc-atomicity-and-self-containment">Atomicity and
self-containment</a></li>
<li><a href="#repeatability"
id="toc-repeatability">Repeatability</a></li>
<li><a href="#informativeness"
id="toc-informativeness">Informativeness</a></li>
<li><a href="#testing-instead-of-visiting"
id="toc-testing-instead-of-visiting">Testing instead of
visiting</a></li>
<li><a href="#nearness" id="toc-nearness">Nearness</a></li>
<li><a href="#good-test-properties">Good test properties</a><ul>
<li><a href="#lightness">Lightness</a></li>
<li><a href="#isolation">Isolation</a></li>
<li><a href="#atomicity-and-self-containment">Atomicity and self-containment</a></li>
<li><a href="#repeatability">Repeatability</a></li>
<li><a href="#informativeness">Informativeness</a></li>
<li><a href="#testing-instead-of-visiting">Testing instead of visiting</a></li>
<li><a href="#nearness">Nearness</a></li>
</ul></li>
<li><a href="#asserts" id="toc-asserts">Asserts</a>
<ul>
<li><a href="#several-checks" id="toc-several-checks">Several
checks</a></li>
<li><a href="#first-parameter-is-expected-value"
id="toc-first-parameter-is-expected-value">First parameter is expected
value</a></li>
<li><a href="#floating-point-comparison"
id="toc-floating-point-comparison">Floating-point comparison</a></li>
<li><a href="#c-string-comparison" id="toc-c-string-comparison">C string
comparison</a></li>
<li><a href="#error-messages" id="toc-error-messages">Error
messages</a></li>
<li><a href="#uncluttered-output"
id="toc-uncluttered-output">Uncluttered output</a></li>
<li><a href="#failures-propagation"
id="toc-failures-propagation">Failures propagation</a></li>
<li><a href="#asserts">Asserts</a><ul>
<li><a href="#several-checks">Several checks</a></li>
<li><a href="#first-parameter-is-expected-value">First parameter is expected value</a></li>
<li><a href="#floating-point-comparison">Floating-point comparison</a></li>
<li><a href="#c-string-comparison">C string comparison</a></li>
<li><a href="#error-messages">Error messages</a></li>
<li><a href="#uncluttered-output">Uncluttered output</a></li>
<li><a href="#failures-propagation">Failures propagation</a></li>
</ul></li>
<li><a href="#naming-and-grouping" id="toc-naming-and-grouping">Naming
and Grouping</a>
<ul>
<li><a href="#test-group-names" id="toc-test-group-names">Test group
names</a></li>
<li><a href="#filename" id="toc-filename">Filename</a></li>
<li><a href="#file-location" id="toc-file-location">File
location</a></li>
<li><a href="#test-names" id="toc-test-names">Test names</a></li>
<li><a href="#fixture-classes" id="toc-fixture-classes">Fixture
classes</a></li>
<li><a href="#friend-classes" id="toc-friend-classes">Friend
classes</a></li>
<li><a href="#oscpu-specific-tests" id="toc-oscpu-specific-tests">OS/CPU
specific tests</a></li>
<li><a href="#naming-and-grouping">Naming and Grouping</a><ul>
<li><a href="#test-group-names">Test group names</a></li>
<li><a href="#filename">Filename</a></li>
<li><a href="#file-location">File location</a></li>
<li><a href="#test-names">Test names</a></li>
<li><a href="#fixture-classes">Fixture classes</a></li>
<li><a href="#friend-classes">Friend classes</a></li>
<li><a href="#oscpu-specific-tests">OS/CPU specific tests</a></li>
</ul></li>
<li><a href="#miscellaneous" id="toc-miscellaneous">Miscellaneous</a>
<ul>
<li><a href="#hotspot-style" id="toc-hotspot-style">Hotspot
style</a></li>
<li><a href="#codetest-metrics" id="toc-codetest-metrics">Code/test
metrics</a></li>
<li><a href="#access-to-non-public-members"
id="toc-access-to-non-public-members">Access to non-public
members</a></li>
<li><a href="#death-tests" id="toc-death-tests">Death tests</a></li>
<li><a href="#external-flags" id="toc-external-flags">External
flags</a></li>
<li><a href="#test-specific-flags"
id="toc-test-specific-flags">Test-specific flags</a></li>
<li><a href="#flag-restoring" id="toc-flag-restoring">Flag
restoring</a></li>
<li><a href="#googletest-documentation"
id="toc-googletest-documentation">GoogleTest documentation</a></li>
<li><a href="#miscellaneous">Miscellaneous</a><ul>
<li><a href="#hotspot-style">Hotspot style</a></li>
<li><a href="#codetest-metrics">Code/test metrics</a></li>
<li><a href="#access-to-non-public-members">Access to non-public members</a></li>
<li><a href="#death-tests">Death tests</a></li>
<li><a href="#external-flags">External flags</a></li>
<li><a href="#test-specific-flags">Test-specific flags</a></li>
<li><a href="#flag-restoring">Flag restoring</a></li>
<li><a href="#googletest-documentation">GoogleTest documentation</a></li>
</ul></li>
<li><a href="#todo" id="toc-todo">TODO</a></li>
<li><a href="#todo">TODO</a></li>
</ul>
</nav>
<p>The purpose of these guidelines is to establish a shared vision on
what kind of native tests and how we want to develop them for Hotspot
using GoogleTest. Hence these guidelines include style items as well as
test approach items.</p>
<p>First section of this document describes properties of good tests
which are common for almost all types of test regardless of language,
framework, etc. Further sections provide recommendations to achieve
those properties and other HotSpot and/or GoogleTest specific
guidelines.</p>
<p>The purpose of these guidelines is to establish a shared vision on what kind of native tests and how we want to develop them for Hotspot using GoogleTest. Hence these guidelines include style items as well as test approach items.</p>
<p>First section of this document describes properties of good tests which are common for almost all types of test regardless of language, framework, etc. Further sections provide recommendations to achieve those properties and other HotSpot and/or GoogleTest specific guidelines.</p>
<h2 id="good-test-properties">Good test properties</h2>
<h3 id="lightness">Lightness</h3>
<p>Use the most lightweight type of tests.</p>
<p>In Hotspot, there are 3 different types of tests regarding their
dependency on a JVM, each next level is slower than previous</p>
<p>In Hotspot, there are 3 different types of tests regarding their dependency on a JVM, each next level is slower than previous</p>
<ul>
<li><p><code>TEST</code> : a test does not depend on a JVM</p></li>
<li><p><code>TEST_VM</code> : a test does depend on an initialized JVM,
but are supposed not to break a JVM, i.e. leave it in a workable
state.</p></li>
<li><p><code>TEST_OTHER_VM</code> : a test depends on a JVM and requires
a freshly initialized JVM or leaves a JVM in non-workable state</p></li>
<li><p><code>TEST_VM</code> : a test does depend on an initialized JVM, but are supposed not to break a JVM, i.e. leave it in a workable state.</p></li>
<li><p><code>TEST_OTHER_VM</code> : a test depends on a JVM and requires a freshly initialized JVM or leaves a JVM in non-workable state</p></li>
</ul>
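<p>For illustration, a minimal sketch (assuming the HotSpot gtest wrapper header <code>unittest.hpp</code>, which provides the <code>TEST_VM</code> and <code>TEST_OTHER_VM</code> macros) of how the three flavors are declared:</p>
<pre><code>// test_lightness_sketch.cpp - illustrative only, not an existing test
#include &quot;unittest.hpp&quot;

// No JVM needed: plain GoogleTest.
TEST(LightnessSketch, pure_computation_needs_no_jvm) {
  EXPECT_EQ(4, 2 + 2);
}

// Needs an initialized JVM, but leaves it in a workable state.
TEST_VM(LightnessSketch, reads_vm_state_only) {
  EXPECT_TRUE(true);   // placeholder body
}

// Needs a fresh JVM, or may leave the shared one in a non-workable state.
TEST_OTHER_VM(LightnessSketch, disturbs_vm_state) {
  EXPECT_TRUE(true);   // placeholder body
}</code></pre>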
<h3 id="isolation">Isolation</h3>
<p>Tests have to be isolated: not to have visible side-effects,
influences on other tests results.</p>
<p>Results of one test should not depend on test execution order, other
tests, otherwise it is becoming almost impossible to find out why a test
failed. Due to hotspot-specific, it is not so easy to get a full
isolation, e.g. we share an initialized JVM between all
<code>TEST_VM</code> tests, so if your test changes JVM's state too
drastically and does not change it back, you had better consider
<code>TEST_OTHER_VM</code>.</p>
<h3 id="atomicity-and-self-containment">Atomicity and
self-containment</h3>
<p>Tests should be <em>atomic</em> and <em>self-contained</em> at the
same time.</p>
<p>One test should check a particular part of a class, subsystem,
functionality, etc. Then it is quite easy to determine what parts of a
product are broken basing on test failures. On the other hand, a test
should test that part more-or-less entirely, because when one sees a
test <code>FooTest::bar</code>, they assume all aspects of bar from
<code>Foo</code> are tested.</p>
<p>However, it is impossible to cover all aspects even of a method, not
to mention a subsystem. In such cases, it is recommended to have several
tests, one for each aspect of a thing under test. For example one test
to tests how <code>Foo::bar</code> works if an argument is
<code>null</code>, another test to test how it works if an argument is
acceptable but <code>Foo</code> is not in the right state to accept it
and so on. This helps not only to make tests atomic, self-contained but
also makes test name self-descriptive (discussed in more details in <a
href="#test-names">Test names</a>).</p>
<p>Tests have to be isolated: not to have visible side-effects, influences on other tests results.</p>
<p>Results of one test should not depend on test execution order, other tests, otherwise it is becoming almost impossible to find out why a test failed. Due to hotspot-specific, it is not so easy to get a full isolation, e.g. we share an initialized JVM between all <code>TEST_VM</code> tests, so if your test changes JVM's state too drastically and does not change it back, you had better consider <code>TEST_OTHER_VM</code>.</p>
<h3 id="atomicity-and-self-containment">Atomicity and self-containment</h3>
<p>Tests should be <em>atomic</em> and <em>self-contained</em> at the same time.</p>
<p>One test should check a particular part of a class, subsystem, functionality, etc. Then it is quite easy to determine what parts of a product are broken basing on test failures. On the other hand, a test should test that part more-or-less entirely, because when one sees a test <code>FooTest::bar</code>, they assume all aspects of bar from <code>Foo</code> are tested.</p>
<p>However, it is impossible to cover all aspects even of a method, not to mention a subsystem. In such cases, it is recommended to have several tests, one for each aspect of a thing under test. For example one test to tests how <code>Foo::bar</code> works if an argument is <code>null</code>, another test to test how it works if an argument is acceptable but <code>Foo</code> is not in the right state to accept it and so on. This helps not only to make tests atomic, self-contained but also makes test name self-descriptive (discussed in more details in <a href="#test-names">Test names</a>).</p>
<h3 id="repeatability">Repeatability</h3>
<p>Tests have to be repeatable.</p>
<p>Reproducibility is very crucial for a test. No one likes sporadic
test failures, they are hard to investigate, fix and verify a fix.</p>
<p>In some cases, it is quite hard to write a 100% repeatable test,
since besides a test there can be other moving parts, e.g. in case of
<code>TEST_VM</code> there are several concurrently running threads.
Despite this, we should try to make a test as reproducible as
possible.</p>
<p>Reproducibility is very crucial for a test. No one likes sporadic test failures, they are hard to investigate, fix and verify a fix.</p>
<p>In some cases, it is quite hard to write a 100% repeatable test, since besides a test there can be other moving parts, e.g. in case of <code>TEST_VM</code> there are several concurrently running threads. Despite this, we should try to make a test as reproducible as possible.</p>
<h3 id="informativeness">Informativeness</h3>
<p>In case of a failure, a test should be as <em>informative</em> as
possible.</p>
<p>Having more information about a test failure than just compared
values can be very useful for failure troubleshooting, it can reduce or
even completely eliminate debugging hours. This is even more important
in case of not 100% reproducible failures.</p>
<p>Achieving this property, one can easily make a test too verbose, so
it will be really hard to find useful information in the ocean of
useless information. Hence they should not only think about how to
provide <a href="#error-messages">good information</a>, but also <a
href="#uncluttered-output">when to do it</a>.</p>
<p>In case of a failure, a test should be as <em>informative</em> as possible.</p>
<p>Having more information about a test failure than just compared values can be very useful for failure troubleshooting, it can reduce or even completely eliminate debugging hours. This is even more important in case of not 100% reproducible failures.</p>
<p>Achieving this property, one can easily make a test too verbose, so it will be really hard to find useful information in the ocean of useless information. Hence they should not only think about how to provide <a href="#error-messages">good information</a>, but also <a href="#uncluttered-output">when to do it</a>.</p>
<h3 id="testing-instead-of-visiting">Testing instead of visiting</h3>
<p>Tests should <em>test</em>.</p>
<p>It is not enough just to "visit" some code, a test should check that
code does that it has to do, compare return values with expected values,
check that desired side effects are done, and undesired are not, and so
on. In other words, a test should contain at least one GoogleTest
assertion and do not rely on JVM asserts.</p>
<p>Generally speaking to write a good test, one should create a model of
the system under tests, a model of possible bugs (or bugs which one
wants to find) and design tests using those models.</p>
<p>It is not enough just to &quot;visit&quot; some code, a test should check that code does that it has to do, compare return values with expected values, check that desired side effects are done, and undesired are not, and so on. In other words, a test should contain at least one GoogleTest assertion and do not rely on JVM asserts.</p>
<p>Generally speaking to write a good test, one should create a model of the system under tests, a model of possible bugs (or bugs which one wants to find) and design tests using those models.</p>
<h3 id="nearness">Nearness</h3>
<p>Prefer having checks inside test code.</p>
<p>Not only does having test logic outside, e.g. verification method,
depending on asserts in product code contradict with several items above
but also decreases tests readability and stability. It is much easier
to understand that a test is testing when all testing logic is located
inside a test or nearby in shared test libraries. As a rule of thumb,
the closer a check to a test, the better.</p>
<p>Not only does having test logic outside, e.g. verification method, depending on asserts in product code contradict with several items above but also decreases tests readability and stability. It is much easier to understand that a test is testing when all testing logic is located inside a test or nearby in shared test libraries. As a rule of thumb, the closer a check to a test, the better.</p>
<h2 id="asserts">Asserts</h2>
<h3 id="several-checks">Several checks</h3>
<p>Prefer <code>EXPECT</code> over <code>ASSERT</code> if possible.</p>
<p>This is related to the <a href="#informativeness">informativeness</a>
property of tests, information for other checks can help to better
localize a defects root-cause. One should use <code>ASSERT</code> if it
is impossible to continue test execution or if it does not make much
sense. Later in the text, <code>EXPECT</code> forms will be used to
refer to both <code>ASSERT/EXPECT</code>.</p>
<p>When it is possible to make several different checks, but impossible
to continue test execution if at least one check fails, you can use
<code>::testing::Test::HasNonfatalFailure()</code> function. The
recommended way to express that is
<code>ASSERT_FALSE(::testing::Test::HasNonfatalFailure())</code>.
Besides making it clear why a test is aborted, it also allows you to
provide more information about a failure.</p>
<h3 id="first-parameter-is-expected-value">First parameter is expected
value</h3>
<p>In all equality assertions, expected values should be passed as the
first parameter.</p>
<p>This convention is adopted by GoogleTest, and there is a slight
difference in how GoogleTest treats parameters, the most important one
is <code>null</code> detection. Due to different reasons,
<code>null</code> detection is enabled only for the first parameter,
that is to said <code>EXPECT_EQ(NULL, object)</code> checks that object
is <code>null</code>, while <code>EXPECT_EQ(object, NULL)</code> checks
that object equals to <code>NULL</code>, GoogleTest is very strict
regarding types of compared values so the latter will generates a
compile-time error.</p>
<p>This is related to the <a href="#informativeness">informativeness</a> property of tests, information for other checks can help to better localize a defects root-cause. One should use <code>ASSERT</code> if it is impossible to continue test execution or if it does not make much sense. Later in the text, <code>EXPECT</code> forms will be used to refer to both <code>ASSERT/EXPECT</code>.</p>
<p>When it is possible to make several different checks, but impossible to continue test execution if at least one check fails, you can use <code>::testing::Test::HasNonfatalFailure()</code> function. The recommended way to express that is <code>ASSERT_FALSE(::testing::Test::HasNonfatalFailure())</code>. Besides making it clear why a test is aborted, it also allows you to provide more information about a failure.</p>
<h3 id="first-parameter-is-expected-value">First parameter is expected value</h3>
<p>In all equality assertions, expected values should be passed as the first parameter.</p>
<p>This convention is adopted by GoogleTest, and there is a slight difference in how GoogleTest treats parameters, the most important one is <code>null</code> detection. Due to different reasons, <code>null</code> detection is enabled only for the first parameter, that is to said <code>EXPECT_EQ(NULL, object)</code> checks that object is <code>null</code>, while <code>EXPECT_EQ(object, NULL)</code> checks that object equals to <code>NULL</code>, GoogleTest is very strict regarding types of compared values so the latter will generates a compile-time error.</p>
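<p>A short illustrative sketch (plain GoogleTest, not an existing HotSpot test) combining both points: use <code>EXPECT</code> so later checks still run, fall back to <code>ASSERT</code> only when continuing makes no sense, and always pass the expected value first:</p>
<pre><code>#include &lt;gtest/gtest.h&gt;

static int* make_answer() { static int v = 42; return &amp;v; }

TEST(AssertSketch, expected_value_goes_first) {
  int* p = make_answer();
  // ASSERT: dereferencing a null pointer below would make no sense.
  ASSERT_NE(nullptr, p);
  // EXPECT: if one of these fails, the others are still reported.
  EXPECT_EQ(42, *p);       // expected value is the first parameter
  EXPECT_EQ(0, *p % 2);
  // Abort explicitly if any EXPECT above produced a nonfatal failure.
  ASSERT_FALSE(::testing::Test::HasNonfatalFailure());
}</code></pre>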
<h3 id="floating-point-comparison">Floating-point comparison</h3>
<p>Use floating-point special macros to compare
<code>float/double</code> values.</p>
<p>Because of floating-point number representations and round-off
errors, regular equality comparison will not return true in most cases.
There are special <code>EXPECT_FLOAT_EQ/EXPECT_DOUBLE_EQ</code>
assertions which check that the distance between compared values is not
more than 4 ULPs, there is also <code>EXPECT_NEAR(v1, v2, eps)</code>
which checks that the absolute value of the difference between
<code>v1</code> and <code>v2</code> is not greater than
<code>eps</code>.</p>
<p>Use floating-point special macros to compare <code>float/double</code> values.</p>
<p>Because of floating-point number representations and round-off errors, regular equality comparison will not return true in most cases. There are special <code>EXPECT_FLOAT_EQ/EXPECT_DOUBLE_EQ</code> assertions which check that the distance between compared values is not more than 4 ULPs, there is also <code>EXPECT_NEAR(v1, v2, eps)</code> which checks that the absolute value of the difference between <code>v1</code> and <code>v2</code> is not greater than <code>eps</code>.</p>
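<p>For example (an illustrative sketch only):</p>
<pre><code>#include &lt;gtest/gtest.h&gt;

TEST(FloatSketch, use_dedicated_floating_point_macros) {
  double third = 1.0 / 3.0;
  // EXPECT_EQ(1.0, third * 3.0) may fail because of round-off error;
  // the *_EQ floating-point macros tolerate a difference of up to 4 ULPs.
  EXPECT_DOUBLE_EQ(1.0, third * 3.0);
  EXPECT_FLOAT_EQ(1.0f, (1.0f / 3.0f) * 3.0f);
  // EXPECT_NEAR takes an explicit absolute tolerance instead.
  EXPECT_NEAR(1.0, third * 3.0, 1e-9);
}</code></pre>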
<h3 id="c-string-comparison">C string comparison</h3>
<p>Use string special macros for C strings comparisons.</p>
<p><code>EXPECT_EQ</code> just compares pointers values, which is
hardly what one wants comparing C strings. GoogleTest provides
<code>EXPECT_STREQ</code> and <code>EXPECT_STRNE</code> macros to
compare C string contents. There are also case-insensitive versions
<code>EXPECT_STRCASEEQ</code>, <code>EXPECT_STRCASENE</code>.</p>
<p><code>EXPECT_EQ</code> just compares pointers values, which is hardly what one wants comparing C strings. GoogleTest provides <code>EXPECT_STREQ</code> and <code>EXPECT_STRNE</code> macros to compare C string contents. There are also case-insensitive versions <code>EXPECT_STRCASEEQ</code>, <code>EXPECT_STRCASENE</code>.</p>
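<p>For example (illustrative sketch):</p>
<pre><code>#include &lt;gtest/gtest.h&gt;
#include &lt;cstring&gt;

TEST(CStringSketch, compare_contents_not_pointers) {
  char buffer[8];
  std::strcpy(buffer, &quot;gc&quot;);
  const char* expected = &quot;gc&quot;;
  // EXPECT_EQ(expected, buffer) would compare pointer values and fail.
  EXPECT_STREQ(expected, buffer);
  EXPECT_STRNE(&quot;g1&quot;, buffer);
  EXPECT_STRCASEEQ(&quot;GC&quot;, buffer);
}</code></pre>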
<h3 id="error-messages">Error messages</h3>
<p>Provide informative, but not too verbose error messages.</p>
<p>All GoogleTest asserts print compared expressions and their values,
so there is no need to have them in error messages. Asserts print only
compared values, they do not print any of interim variables, e.g.
<code>ASSERT_TRUE((val1 == val2 &amp;&amp; isFail(foo(8)) || i == 18)</code>
prints only one value. If you use some complex predicates, please
consider <code>EXPECT_PRED*</code> or <code>EXPECT_FORMAT_PRED</code>
assertions family, they check that a predicate returns true/success and
print out all parameters values.</p>
<p>However in some cases, default information is not enough, a commonly
used example is an assert inside a loop, GoogleTest will not print
iteration values (unless it is an assert's parameter). Other
demonstrative examples are printing error code and a corresponding error
message; printing internal states which might have an impact on results.
One should add this information to assert message using
<code>&lt;&lt;</code> operator.</p>
<p>All GoogleTest asserts print compared expressions and their values, so there is no need to have them in error messages. Asserts print only compared values, they do not print any of interim variables, e.g. <code>ASSERT_TRUE((val1 == val2 &amp;&amp; isFail(foo(8)) || i == 18)</code> prints only one value. If you use some complex predicates, please consider <code>EXPECT_PRED*</code> or <code>EXPECT_FORMAT_PRED</code> assertions family, they check that a predicate returns true/success and print out all parameters values.</p>
<p>However in some cases, default information is not enough, a commonly used example is an assert inside a loop, GoogleTest will not print iteration values (unless it is an assert's parameter). Other demonstrative examples are printing error code and a corresponding error message; printing internal states which might have an impact on results. One should add this information to assert message using <code>&lt;&lt;</code> operator.</p>
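<p>A small sketch of attaching that extra context with the <code>&lt;&lt;</code> operator, e.g. the loop iteration an assert would not otherwise report:</p>
<pre><code>#include &lt;gtest/gtest.h&gt;

static int square(int v) { return v * v; }

TEST(MessageSketch, report_the_failing_iteration) {
  for (int i = 0; i &lt; 4; i++) {
    EXPECT_EQ(i * i, square(i)) &lt;&lt; &quot;input was &quot; &lt;&lt; i;
  }
}</code></pre>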
<h3 id="uncluttered-output">Uncluttered output</h3>
<p>Print information only if it is needed.</p>
<p>Too verbose tests which print all information even if they pass are
very bad practice. They just pollute output, so it becomes harder to
find useful information. In order not print information till it is
really needed, one should consider saving it to a temporary buffer and
pass to an assert. <a
href="https://git.openjdk.org/jdk/blob/master/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp"
class="uri">https://git.openjdk.org/jdk/blob/master/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp</a>
has a good example how to do that.</p>
<p>Too verbose tests which print all information even if they pass are very bad practice. They just pollute output, so it becomes harder to find useful information. In order not print information till it is really needed, one should consider saving it to a temporary buffer and pass to an assert. <a href="https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp" class="uri">https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp</a> has a good example how to do that.</p>
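<p>One way to follow that advice (a sketch, not the linked test) is to collect diagnostics into a temporary buffer and attach it to the assert, so nothing is printed unless the check fails:</p>
<pre><code>#include &lt;gtest/gtest.h&gt;
#include &lt;sstream&gt;

TEST(OutputSketch, print_details_only_on_failure) {
  std::ostringstream log;
  int sum = 0;
  for (int i = 0; i &lt; 4; i++) {
    sum += i;
    log &lt;&lt; &quot;after adding &quot; &lt;&lt; i &lt;&lt; &quot;: sum=&quot; &lt;&lt; sum &lt;&lt; '\n';
  }
  // The collected log is shown only if this expectation fails.
  EXPECT_EQ(6, sum) &lt;&lt; log.str();
}</code></pre>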
<h3 id="failures-propagation">Failures propagation</h3>
<p>Wrap a subroutine call into <code>EXPECT_NO_FATAL_FAILURE</code>
macro to propagate failures.</p>
<p><code>ASSERT</code> and <code>FAIL</code> abort only the current
function, so if you have them in a subroutine, a test will not be
aborted after the subroutine even if <code>ASSERT</code> or
<code>FAIL</code> fails. You should call such subroutines in
<code>ASSERT_NO_FATAL_FAILURE</code> macro to propagate fatal failures
and abort a test. <code>(EXPECT|ASSERT)_NO_FATAL_FAILURE</code> can also
be used to provide more information.</p>
<p>Due to obvious reasons, there are no
<code>(EXPECT|ASSERT)_NO_NONFATAL_FAILURE</code> macros. However, if you
need to check if a subroutine generated a nonfatal failure (failed an
<code>EXPECT</code>), you can use
<code>::testing::Test::HasNonfatalFailure</code> function, or
<code>::testing::Test::HasFailure</code> function to check if a
subroutine generated any failures, see <a href="#several-checks">Several
checks</a>.</p>
<p>Wrap a subroutine call into <code>EXPECT_NO_FATAL_FAILURE</code> macro to propagate failures.</p>
<p><code>ASSERT</code> and <code>FAIL</code> abort only the current function, so if you have them in a subroutine, a test will not be aborted after the subroutine even if <code>ASSERT</code> or <code>FAIL</code> fails. You should call such subroutines in <code>ASSERT_NO_FATAL_FAILURE</code> macro to propagate fatal failures and abort a test. <code>(EXPECT|ASSERT)_NO_FATAL_FAILURE</code> can also be used to provide more information.</p>
<p>Due to obvious reasons, there are no <code>(EXPECT|ASSERT)_NO_NONFATAL_FAILURE</code> macros. However, if you need to check if a subroutine generated a nonfatal failure (failed an <code>EXPECT</code>), you can use <code>::testing::Test::HasNonfatalFailure</code> function, or <code>::testing::Test::HasFailure</code> function to check if a subroutine generated any failures, see <a href="#several-checks">Several checks</a>.</p>
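<p>For example (illustrative sketch): a fatal failure inside a helper aborts only the helper, so the call is wrapped in order to abort the whole test as well:</p>
<pre><code>#include &lt;gtest/gtest.h&gt;

static void check_precondition(bool available) {
  ASSERT_TRUE(available);   // aborts only check_precondition() itself
}

TEST(PropagationSketch, abort_test_when_helper_fails) {
  // Without the wrapper, the test would continue after the helper's ASSERT.
  ASSERT_NO_FATAL_FAILURE(check_precondition(true));
  // HasFailure() detects fatal or nonfatal failures produced so far.
  EXPECT_FALSE(::testing::Test::HasFailure());
}</code></pre>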
<h2 id="naming-and-grouping">Naming and Grouping</h2>
<h3 id="test-group-names">Test group names</h3>
<p>Test group names should be in CamelCase, start and end with a letter.
A test group should be named after tested class, functionality,
subsystem, etc.</p>
<p>This naming scheme helps to find tests, filter them and simplifies
test failure analysis. For example, class <code>Foo</code> - test group
<code>Foo</code>, compiler logging subsystem - test group
<code>CompilerLogging</code>, G1 GC — test group <code>G1GC</code>, and
so forth.</p>
<p>Test group names should be in CamelCase, start and end with a letter. A test group should be named after tested class, functionality, subsystem, etc.</p>
<p>This naming scheme helps to find tests, filter them and simplifies test failure analysis. For example, class <code>Foo</code> - test group <code>Foo</code>, compiler logging subsystem - test group <code>CompilerLogging</code>, G1 GC — test group <code>G1GC</code>, and so forth.</p>
<h3 id="filename">Filename</h3>
<p>A test file must have <code>test_</code> prefix and <code>.cpp</code>
suffix.</p>
<p>Both are actually requirements from the current build system to
recognize your tests.</p>
<p>A test file must have <code>test_</code> prefix and <code>.cpp</code> suffix.</p>
<p>Both are actually requirements from the current build system to recognize your tests.</p>
<h3 id="file-location">File location</h3>
<p>Test file location should reflect a location of the tested part of
the product.</p>
<p>Test file location should reflect a location of the tested part of the product.</p>
<ul>
<li><p>All unit tests for a class from <code>foo/bar/baz.cpp</code>
should be placed <code>foo/bar/test_baz.cpp</code> in
<code>hotspot/test/native/</code> directory. Having all tests for a
class in one file is a common practice for unit tests, it helps to see
all existing tests at once, share functions and/or resources without
losing encapsulation.</p></li>
<li><p>For tests which test more than one class, directory hierarchy
should be the same as product hierarchy, and file name should reflect
the name of the tested subsystem/functionality. For example, if a
sub-system under tests belongs to <code>gc/g1</code>, tests should be
placed in <code>gc/g1</code> directory.</p></li>
<li><p>All unit tests for a class from <code>foo/bar/baz.cpp</code> should be placed <code>foo/bar/test_baz.cpp</code> in <code>hotspot/test/native/</code> directory. Having all tests for a class in one file is a common practice for unit tests, it helps to see all existing tests at once, share functions and/or resources without losing encapsulation.</p></li>
<li><p>For tests which test more than one class, directory hierarchy should be the same as product hierarchy, and file name should reflect the name of the tested subsystem/functionality. For example, if a sub-system under tests belongs to <code>gc/g1</code>, tests should be placed in <code>gc/g1</code> directory.</p></li>
</ul>
<p>Please note that framework prepends directory name to a test group
name. For example, if <code>TEST(foo, check_this)</code> and
<code>TEST(bar, check_that)</code> are defined in
<code>hotspot/test/native/gc/shared/test_foo.cpp</code> file, they will
be reported as <code>gc/shared/foo::check_this</code> and
<code>gc/shared/bar::check_that</code>.</p>
<p>Please note that framework prepends directory name to a test group name. For example, if <code>TEST(foo, check_this)</code> and <code>TEST(bar, check_that)</code> are defined in <code>hotspot/test/native/gc/shared/test_foo.cpp</code> file, they will be reported as <code>gc/shared/foo::check_this</code> and <code>gc/shared/bar::check_that</code>.</p>
<h3 id="test-names">Test names</h3>
<p>Test names should be in small_snake_case, start and end with a
letter. A test name should reflect that a test checks.</p>
<p>Such naming makes tests self-descriptive and helps a lot during the
whole test life cycle. It is easy to do test planning, test inventory,
to see what things are not tested, to review tests, to analyze test
failures, to evolve a test, etc. For example
<code>foo_return_0_if_name_is_null</code> is better than
<code>foo_sanity</code> or <code>foo_basic</code> or just
<code>foo</code>,
<code>humongous_objects_can_not_be_moved_by_young_gc</code> is better
than <code>ho_young_gc</code>.</p>
<p>Strictly speaking, using underscores is against the GoogleTest
project convention, because it can lead to illegal identifiers; however,
that rule is stricter than necessary. Restricting underscores to test
names only, and prohibiting test names that start or end with an
underscore, is enough to stay safe.</p>
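<p>For illustration (hypothetical names, trivial bodies):</p>
<pre><code>// Good: the name states the checked behaviour.
TEST(foo, return_0_if_name_is_null) {
  EXPECT_TRUE(true);  // placeholder body
}

// Too vague to be useful:
//   TEST(foo, sanity) { ... }
// Avoid leading/trailing underscores, they can produce illegal identifiers:
//   TEST(foo, _starts_with_underscore) { ... }</code></pre>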
<h3 id="fixture-classes">Fixture classes</h3>
<p>Fixture classes should be named after the tested classes, subsystems,
etc. (follow the <a href="#test-group-names">Test group names rule</a>)
and have the <code>Test</code> suffix to prevent class name
conflicts.</p>
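<p>A minimal sketch of a fixture for a hypothetical class
<code>Foo</code>:</p>
<pre><code>// The Test suffix avoids a name clash with the product class Foo itself.
class FooTest : public ::testing::Test {
 protected:
  int _initial_value;
  void SetUp() override {
    _initial_value = 42;
  }
};

TEST_F(FooTest, initial_value_is_42) {
  EXPECT_EQ(42, _initial_value);
}</code></pre>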
<h3 id="friend-classes">Friend classes</h3>
<p>All test-purpose friends should have either a <code>Test</code> or a
<code>Testable</code> suffix.</p>
<p>It greatly simplifies understanding of a friendship's purpose and
allows statically checking that private members are not exposed
unexpectedly. Having <code>FooTest</code> as a friend of
<code>Foo</code> without any comments will be understood as a necessary
evil to get testability.</p>
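<p>For example (hypothetical class, sketch only), the suffix alone makes
the intent of the friendship clear:</p>
<pre><code>class Foo {
  friend class FooTest;  // granted purely for testing
 public:
  Foo() : _counter(0) {}
 private:
  int _counter;
};</code></pre>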
<h3 id="oscpu-specific-tests">OS/CPU specific tests</h3>
<p>Guard OS/CPU specific tests with <code>#ifdef</code> and include the
OS/CPU name in the filename.</p>
<p>For the time being, we do not support separate directories for OS,
CPU, or OS-CPU specific tests. If we end up with a lot of such tests, we
will change the directory layout and the build system to support that in
the same way it is done in Hotspot.</p>
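<p>A sketch of what this looks like for a hypothetical Linux-only check
(placed, say, in a <code>test_foo_linux.cpp</code> file):</p>
<pre><code>#ifdef LINUX
TEST(foo, linux_only_behaviour) {
  EXPECT_TRUE(true);  // placeholder for a Linux-specific check
}
#endif // LINUX</code></pre>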
<h2 id="miscellaneous">Miscellaneous</h2>
<h3 id="hotspot-style">Hotspot style</h3>
<p>Abide by the norms and rules accepted in the Hotspot style guide.</p>
<p>Tests are a part of Hotspot, so everything (if applicable) we use for
Hotspot should be used for tests as well. These guidelines cover
test-specific things.</p>
<h3 id="codetest-metrics">Code/test metrics</h3>
<p>Coverage information and other code/test metrics are quite useful to
decide what tests should be written, what tests should be improved and
what can be removed.</p>
<p>For unit tests, a widely used and well-known coverage metric is
branch coverage, which provides good test quality with a relatively easy
test development process. For other levels of testing, branch coverage is
not as good, and one should consider other metrics, e.g. transaction flow
coverage or data flow coverage.</p>
<h3 id="access-to-non-public-members">Access to non-public members</h3>
<p>Use an explicit friend class to get access to non-public members.</p>
<p>We do not use the GoogleTest macro to declare a friendship relation,
because, from our point of view, it is less clear than an explicit
declaration.</p>
<p>Declaring a test fixture class as a friend class of the tested class
is the easiest and the clearest way to get access. However, it has some
disadvantages; here are some of them:</p>
<ul>
<li>Each test has to be declared as a friend</li>
<li>Subclasses do not inherit the friendship relation</li>
</ul>
<p>In other words, it is harder to share code between tests. Hence, if
you want to share code or expect it to be useful in other tests, you
should consider making the members in the tested class protected and
introducing a shared test-only class which exposes those members via
public functions, or even making the members publicly accessible right
away in the product class. If changing member visibility is not an
option, one can create a friend class which exposes the members.</p>
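<p>One possible shape of such a friend accessor (a hypothetical sketch;
the TODO section below notes that real examples are still missing from
these guidelines):</p>
<pre><code>class Foo {
  friend class FooTestAccess;  // single, shared test-only friend
 public:
  Foo() : _count(0) {}
 protected:
  int _count;
};

// Every test can use this accessor without being a friend of Foo itself.
class FooTestAccess {
 public:
  static int count(const Foo&amp; foo) { return foo._count; }
};</code></pre>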
<h3 id="death-tests">Death tests</h3>
<p>You cannot use death tests inside <code>TEST_OTHER_VM</code> and
<code>TEST_VM_ASSERT*</code>.</p>
<p>We tried to make the Hotspot-GoogleTest integration as transparent as
possible; however, due to the current implementation of
<code>TEST_OTHER_VM</code> and <code>TEST_VM_ASSERT*</code> tests, you
cannot use death test functionality in them. These tests are implemented
as GoogleTest death tests, and GoogleTest does not allow having a death
test inside another death test.</p>
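<p>Purely as an illustration of the shape of a GoogleTest death
assertion (the macro used and the guarded statement are hypothetical; the
point is only that such an assertion must not appear inside
<code>TEST_OTHER_VM</code> or <code>TEST_VM_ASSERT*</code>):</p>
<pre><code>TEST_VM(death_test_demo, guarantee_aborts_with_message) {
  // EXPECT_DEATH runs the statement in a child process and checks its output.
  EXPECT_DEATH(guarantee(false, "should abort"), "should abort");
}</code></pre>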
<h3 id="external-flags">External flags</h3>
<p>Passing external flags to a tested JVM is not supported.</p>
<p>The rationale for this design decision is to simplify both the tests
and the test framework, and to avoid failures related to incompatible
flag combinations until there is a good solution for that. However,
there are cases when one wants to test a JVM with a specific flag
combination; the <code>_JAVA_OPTIONS</code> environment variable can be
used to do that. Flags from <code>_JAVA_OPTIONS</code> will be used in
<code>TEST_VM</code>, <code>TEST_OTHER_VM</code> and
<code>TEST_VM_ASSERT*</code> tests.</p>
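<p>For example, assuming the environment variable propagates from your
shell through the build system to the test process, something like the
following could be used to run the gtests with Serial GC selected:</p>
<pre><code>$ _JAVA_OPTIONS="-XX:+UseSerialGC" make test TEST="gtest:all"</code></pre>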
<h3 id="test-specific-flags">Test-specific flags</h3>
<p>Passing flags to a tested JVM in <code>TEST_OTHER_VM</code> and
<code>TEST_VM_ASSERT*</code> should be possible, but is not implemented
yet.</p>
<p>A facility to pass test-specific flags is needed for system,
regression or other types of tests which require a fully initialized JVM
in a particular configuration, e.g. with Serial GC selected. There is no
support for such tests now; however, there is a plan to add that in
upcoming releases.</p>
<p>For now, if a test depends on flag values, it should have
<code>if (!&lt;flag&gt;) { return }</code> guards in the very beginning
and a <code>@requires</code> comment, similar to the jtreg
<code>@requires</code> directive, right before the test macros. <a
href="https://git.openjdk.org/jdk/blob/master/test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp"
class="uri">https://git.openjdk.org/jdk/blob/master/test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp</a>
has an example of this temporary workaround. It is important to follow
that pattern as it allows us to easily find all such tests and update
them as soon as there is an implementation of the flag passing
facility.</p>
<p>In the long term, we expect jtreg to support GoogleTest tests as
first-class citizens, that is to say, jtreg will parse
<code>@requires</code> comments and filter out inapplicable tests.</p>
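<p>A sketch of the workaround described above (the flag, the comment text
and the test itself are hypothetical; see the linked file for the real
pattern):</p>
<pre><code>// @requires UseG1GC
TEST_VM(g1_foo, behaves_as_expected_with_g1) {
  if (!UseG1GC) {
    return;
  }
  // ... body that relies on G1 being the selected GC ...
}</code></pre>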
<h3 id="flag-restoring">Flag restoring</h3>
<p>Restore changed flags.</p>
<p>It is quite common for tests to configure the JVM in a certain way by
changing flag values. GoogleTest provides two ways to set up the
environment before a test and restore it afterward: using either a
constructor and destructor or the <code>SetUp</code> and
<code>TearDown</code> functions. Both ways require using a test fixture
class, which is sometimes too wordy. The simpler facilities like the
<code>FLAG_GUARD</code> macro or the <code>*FlagSetting</code> classes
could be used in such cases to restore/set values.</p>
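<p>A hypothetical sketch, assuming <code>FLAG_GUARD(SomeFlag)</code>
records the current value of the flag and restores it when the guard goes
out of scope (the flag chosen below is only an example):</p>
<pre><code>TEST_VM(flag_restoring_demo, changed_flag_is_restored) {
  FLAG_GUARD(VerifyBeforeGC);
  VerifyBeforeGC = true;
  // ... test body that relies on VerifyBeforeGC being enabled ...
}  // VerifyBeforeGC is restored here</code></pre>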
<p>Caveats:</p>
<ul>
<li><p>Changing a flag's value could break the invariants between flag
values and hence could lead to an unexpected/unsupported JVM
state.</p></li>
<li><p><code>FLAG_SET_*</code> macros can change more than one flag (in
order to maintain invariants), so it is hard to predict which flags will
be changed, and that makes restoring all changed flags a nontrivial task.
Thus, in case one uses <code>FLAG_SET_*</code> macros, they should use
the <code>TEST_OTHER_VM</code> test type.</p></li>
</ul>
<h3 id="googletest-documentation">GoogleTest documentation</h3>
<p>In case you have any questions regarding GoogleTest itself, its
asserts, test declaration macros, other macros, etc, please consult its
documentation.</p>
<h2 id="todo">TODO</h2>
<p>Although this document provides guidelines on the most important
parts of test development using GTest, it still misses a few items:</p>
<ul>
<li><p>Examples, esp for <a href="#access-to-non-public-members">access
to non-public members</a></p></li>
<li><p>test types: purpose, drawbacks, limitation</p>
<ul>
<li><code>TEST_VM</code></li>
<li><code>TEST_VM_F</code></li>
@@ -471,7 +195,7 @@ to non-public members</a></p></li>
<li><code>TEST_VM_ASSERT</code></li>
<li><code>TEST_VM_ASSERT_MSG</code></li>
</ul></li>
<li><p>Miscellaneous</p>
<ul>
<li>Test libraries
<ul>
@@ -484,8 +208,7 @@ to non-public members</a></p></li>
<li>how to run tests in random order</li>
<li>how to run only specific tests</li>
<li>how to run each test separately</li>
<li>check that a test can find bugs it is supposed to by introducing
them</li>
</ul></li>
<li>mocks/stubs/dependency injection</li>
<li>setUp/tearDown

View File

@@ -194,7 +194,7 @@ very bad practice. They just pollute output, so it becomes harder to
find useful information. In order not to print information till it is
really needed, one should consider saving it to a temporary buffer and
passing it to an assert.
<https://git.openjdk.org/jdk/blob/master/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp>
<https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp>
has a good example how to do that.
### Failures propagation
@@ -383,7 +383,7 @@ upcoming releases.
For now, if a test depends on flags values, it should have `if
(!<flag>) { return }` guards in the very beginning and `@requires`
comment similar to jtreg `@requires` directive right before test macros.
<https://git.openjdk.org/jdk/blob/master/test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp>
<https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp>
has an example of this temporary workaround. It is important to follow
that pattern as it allows us to easily find all such tests and update
them as soon as there is an implementation of flag passing facility.

View File

@@ -5,19 +5,11 @@
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
<title>IDE support in the JDK</title>
<style>
<style type="text/css">
code{white-space: pre-wrap;}
span.smallcaps{font-variant: small-caps;}
div.columns{display: flex; gap: min(4vw, 1.5em);}
div.column{flex: auto; overflow-x: auto;}
div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
ul.task-list{list-style: none;}
ul.task-list li input[type="checkbox"] {
width: 0.8em;
margin: 0 0.8em 0.2em -1.6em;
vertical-align: middle;
}
.display.math{display: block; text-align: center; margin: 0.5rem auto;}
span.underline{text-decoration: underline;}
div.column{display: inline-block; vertical-align: top; width: 50%;}
</style>
<link rel="stylesheet" href="../make/data/docs-resources/resources/jdk-default.css" />
<!--[if lt IE 9]>
@@ -28,144 +20,41 @@
<header id="title-block-header">
<h1 class="title">IDE support in the JDK</h1>
</header>
<nav id="TOC" role="doc-toc">
<nav id="TOC">
<ul>
<li><a href="#introduction" id="toc-introduction">Introduction</a>
<ul>
<li><a href="#ide-support-for-native-code"
id="toc-ide-support-for-native-code">IDE support for native
code</a></li>
<li><a href="#ide-support-for-java-code"
id="toc-ide-support-for-java-code">IDE support for Java code</a></li>
<li><a href="#introduction">Introduction</a><ul>
<li><a href="#ide-support-for-native-code">IDE support for native code</a></li>
<li><a href="#ide-support-for-java-code">IDE support for Java code</a></li>
</ul></li>
</ul>
</nav>
<h2 id="introduction">Introduction</h2>
<p>When you are familiar with building and testing the JDK, you may want
to configure an IDE to work with the source code. The instructions
differ a bit depending on whether you are interested in working with the
native (C/C++) or the Java code.</p>
<h3 id="ide-support-for-native-code">IDE support for native code</h3>
<p>There are a few ways to generate IDE configuration for the native
sources, depending on which IDE to use.</p>
<h4 id="visual-studio-code">Visual Studio Code</h4>
<p>The make system can generate a <a
href="https://code.visualstudio.com">Visual Studio Code</a> workspace
that has C/C++ source indexing configured correctly, as well as launcher
targets for tests and the Java launcher. After configuring, a workspace
for the configuration can be generated using:</p>
<pre class="shell"><code>make vscode-project</code></pre>
<p>This creates a file called <code>jdk.code-workspace</code> in the
build output folder. The full location will be printed after the
workspace has been generated. To use it, choose
<code>File -&gt; Open Workspace...</code> in Visual Studio Code.</p>
<h5 id="alternative-indexers">Alternative indexers</h5>
<p>The main <code>vscode-project</code> target configures the default
C++ support in Visual Studio Code. There are also other source indexers
that can be installed, that may provide additional features. It's
currently possible to generate configuration for two such indexers, <a
href="https://clang.llvm.org/extra/clangd/">clangd</a> and <a
href="https://github.com/Andersbakken/rtags">rtags</a>. These can be
configured by appending the name of the indexer to the make target, such
as:</p>
<pre class="shell"><code>make vscode-project-clangd</code></pre>
<p>Additional instructions for configuring the given indexer will be
displayed after the workspace has been generated.</p>
<h4 id="visual-studio">Visual Studio</h4>
<p>The make system can generate a Visual Studio project for the Hotspot
native source. After configuring, the project is generated using:</p>
<pre class="shell"><code>make hotspot-ide-project</code></pre>
<p>This creates a file named <code>jvm.vcxproj</code> in
<code>ide\hotspot-visualstudio</code> subfolder of the build output
folder. The file can be opened in Visual Studio via
<code>File -&gt; Open -&gt; Project/Solution</code>.</p>
<h4 id="eclipse-cdt">Eclipse CDT</h4>
<p>The make system can generate an Eclipse CDT Workspace that enables
Eclipse indexing for the C and C++ sources throughout the entire
codebase, as well as registering all common make targets to be runnable
from the Eclipse explorer. This can be done after configuring by
running:</p>
<pre><code>make eclipse-native-env</code></pre>
<p>After this is run, simply open and import the workspace in Eclipse
through
<code>File -&gt; Import -&gt; Projects from Folder or Archive</code> and
at <code>Import source</code> click on the directory
<code>ide\eclipse</code>, which can be found in the build output
folder.</p>
<p>If this doesn't work, you can also try
<code>File -&gt; Import -&gt; Existing Projects into Workspace</code>
instead.</p>
<p>Setting up an Eclipse Workspace is relatively lightweight compared to
other supported IDEs, but requires that your CDT installation has Cross
GCC support enabled at the moment, even if you aren't cross compiling.
The Visual C++ compiler is, at present, not supported as an indexer.</p>
<p>If desired, you can instead request make to only include indexing
support for just the Java Virtual Machine instead of the entire native
codebase, by running:</p>
<pre><code>make eclipse-hotspot-env</code></pre>
<p>If you think your particular Eclipse installation can handle the
strain, the make system also supports generating a combined Java and
C/C++ Workspace for Eclipse which can then conveniently switch between
Java and C/C++ natures during development by running:</p>
<pre><code>make eclipse-mixed-env</code></pre>
<p>Do note that this generates all features that come with both Java and
C/C++ natures.</p>
<p>By default, the Eclipse Workspace is located in the ide subdirectory
in the build output. To share the JDK's source directory with the
Eclipse Workspace, you can instead run:</p>
<pre><code>make eclipse-shared-&lt;ENV&gt;-env</code></pre>
<p>Eclipse support in the JDK is relatively new, so do keep in mind that
not everything may work at the moment. As such, the resulting Workspace
also has compilation database parsing support enabled, so you can pass
Eclipse the compile commands file (see below) if all else fails.</p>
<h4 id="compilation-database">Compilation Database</h4>
<p>The make system can generate generic native code indexing support in
the form of a <a
href="https://clang.llvm.org/docs/JSONCompilationDatabase.html">Compilation
Database</a> that can be used by many different IDEs and source code
indexers.</p>
<pre class="shell"><code>make compile-commands</code></pre>
<p>It's also possible to generate the Compilation Database for the
HotSpot source code only, which is a bit faster as it includes less
information.</p>
<pre class="shell"><code>make compile-commands-hotspot</code></pre>
<h3 id="ide-support-for-java-code">IDE support for Java code</h3>
<h4 id="intellij-idea">IntelliJ IDEA</h4>
<p>The JDK project has a script that can be used for indexing the
project with IntelliJ. After configuring and building the JDK, an
IntelliJ workspace can be generated by running the following command in
the top-level folder of the cloned repository:</p>
<pre class="shell"><code>bash bin/idea.sh</code></pre>
<p>To use it, choose <code>File -&gt; Open...</code> in IntelliJ and
select the folder where you ran the above script.</p>
<p>Next, configure the project SDK in IntelliJ. Open
<code>File -&gt; Project Structure -&gt; Project</code> and select
<code>build/&lt;config&gt;/images/jdk</code> as the SDK to use.</p>
<p>In order to run the tests from the IDE, you can use the JTReg plugin.
Instructions for building and using the plugin can be found <a
href="https://github.com/openjdk/jtreg/tree/master/plugins/idea">here</a>.</p>
<h4 id="eclipse">Eclipse</h4>
<p>Eclipse JDT is a widely used Java IDE and has been for a very long
time, being a popular choice alongside IntelliJ IDEA for Java
development. Likewise, the JDK now includes support for developing its
Java sources with Eclipse, which can be achieved by setting up a Java
Workspace by running:</p>
<pre><code>make eclipse-java-env</code></pre>
<p>After the workspace has been generated you can import it in the same
way as you would with Eclipse CDT:</p>
<p>Follow
<code>File -&gt; Import -&gt; Projects from Folder or Archive</code> and
select the <code>ide\eclipse</code> directory in the build output folder
to import the newly created Java Workspace.</p>
<p>If doing so results in an error, you can also import the JDK via
<code>File -&gt; Import -&gt; Existing Projects into Workspace</code> as
a last resort.</p>
<p>Alternatively, if you want a Java Workspace inside the JDK's source
directory, you can instead run:</p>
<pre><code>make eclipse-shared-java-env</code></pre>
<p>As mentioned above for Eclipse CDT, you can create a combined Java
and C/C++ Workspace which can conveniently switch between Java and C/C++
natures during development by running:</p>
<pre><code>make eclipse-mixed-env</code></pre>
</body>
</html>

View File

@@ -56,63 +56,6 @@ This creates a file named `jvm.vcxproj` in `ide\hotspot-visualstudio`
subfolder of the build output folder. The file can be opened in Visual Studio
via `File -> Open -> Project/Solution`.
#### Eclipse CDT
The make system can generate an Eclipse CDT Workspace that enables Eclipse
indexing for the C and C++ sources throughout the entire codebase, as well as
registering all common make targets to be runnable from the Eclipse explorer.
This can be done after configuring by running:
```
make eclipse-native-env
```
After this is run, simply open and import the workspace in Eclipse through
`File -> Import -> Projects from Folder or Archive` and at
`Import source` click on the directory `ide\eclipse`, which can be
found in the build output folder.
If this doesn't work, you can also try
`File -> Import -> Existing Projects into Workspace`
instead.
Setting up an Eclipse Workspace is relatively lightweight compared to other
supported IDEs, but requires that your CDT installation has Cross GCC support
enabled at the moment, even if you aren't cross compiling. The Visual C++
compiler is, at present, not supported as an indexer.
If desired, you can instead request make to only include indexing support for
just the Java Virtual Machine instead of the entire native codebase, by running:
```
make eclipse-hotspot-env
```
If you think your particular Eclipse installation can handle the strain, the
make system also supports generating a combined Java and C/C++ Workspace for
Eclipse which can then conveniently switch between Java and C/C++ natures
during development by running:
```
make eclipse-mixed-env
```
Do note that this generates all features that come with both Java and C/C++
natures.
By default, the Eclipse Workspace is located in the ide subdirectory in the
build output. To share the JDK's source directory with the Eclipse Workspace,
you can instead run:
```
make eclipse-shared-<ENV>-env
```
Eclipse support in the JDK is relatively new, so do keep in mind that not
everything may work at the moment. As such, the resulting Workspace also
has compilation database parsing support enabled, so you can pass Eclipse
the compile commands file (see below) if all else fails.
#### Compilation Database
The make system can generate generic native code indexing support in the form of
@@ -153,40 +96,3 @@ as the SDK to use.
In order to run the tests from the IDE, you can use the JTReg plugin.
Instructions for building and using the plugin can be found
[here](https://github.com/openjdk/jtreg/tree/master/plugins/idea).
#### Eclipse
Eclipse JDT is a widely used Java IDE and has been for a very long time, being
a popular choice alongside IntelliJ IDEA for Java development. Likewise, the
JDK now includes support for developing its Java sources with Eclipse, which
can be achieved by setting up a Java Workspace by running:
```
make eclipse-java-env
```
After the workspace has been generated you can import it in the same way as
you would with Eclipse CDT:
Follow `File -> Import -> Projects from Folder or Archive` and select the
`ide\eclipse` directory in the build output folder to import the newly created
Java Workspace.
If doing so results in an error, you can also import the JDK via
`File -> Import -> Existing Projects into Workspace`
as a last resort.
Alternatively, if you want a Java Workspace inside the JDK's source directory,
you can instead run:
```
make eclipse-shared-java-env
```
As mentioned above for Eclipse CDT, you can create a combined Java and C/C++
Workspace which can conveniently switch between Java and C/C++ natures during
development by running:
```
make eclipse-mixed-env
```

View File

@@ -5,102 +5,54 @@
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
<title>Testing the JDK</title>
<style>
<style type="text/css">
code{white-space: pre-wrap;}
span.smallcaps{font-variant: small-caps;}
div.columns{display: flex; gap: min(4vw, 1.5em);}
div.column{flex: auto; overflow-x: auto;}
div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
ul.task-list{list-style: none;}
ul.task-list li input[type="checkbox"] {
width: 0.8em;
margin: 0 0.8em 0.2em -1.6em;
vertical-align: middle;
}
.display.math{display: block; text-align: center; margin: 0.5rem auto;}
span.underline{text-decoration: underline;}
div.column{display: inline-block; vertical-align: top; width: 50%;}
</style>
<link rel="stylesheet" href="../make/data/docs-resources/resources/jdk-default.css" />
<style type="text/css">pre, code, tt { color: #1d6ae5; }</style>
<!--[if lt IE 9]>
<script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.3/html5shiv-printshiv.min.js"></script>
<![endif]-->
<style type="text/css">pre, code, tt { color: #1d6ae5; }</style>
</head>
<body>
<header id="title-block-header">
<h1 class="title">Testing the JDK</h1>
</header>
<nav id="TOC" role="doc-toc">
<nav id="TOC">
<ul>
<li><a href="#overview" id="toc-overview">Overview</a></li>
<li><a href="#running-tests-locally-with-make-test"
id="toc-running-tests-locally-with-make-test">Running tests locally with
<code>make test</code></a>
<ul>
<li><a href="#configuration"
id="toc-configuration">Configuration</a></li>
<li><a href="#using-make-test-the-run-test-framework">Using &quot;make test&quot; (the run-test framework)</a><ul>
<li><a href="#configuration">Configuration</a></li>
</ul></li>
<li><a href="#test-selection" id="toc-test-selection">Test selection</a>
<ul>
<li><a href="#common-test-groups" id="toc-common-test-groups">Common
Test Groups</a></li>
<li><a href="#jtreg" id="toc-jtreg">JTReg</a></li>
<li><a href="#gtest" id="toc-gtest">Gtest</a></li>
<li><a href="#microbenchmarks"
id="toc-microbenchmarks">Microbenchmarks</a></li>
<li><a href="#special-tests" id="toc-special-tests">Special
tests</a></li>
<li><a href="#test-selection">Test selection</a><ul>
<li><a href="#common-test-groups">Common Test Groups</a></li>
<li><a href="#jtreg">JTReg</a></li>
<li><a href="#gtest">Gtest</a></li>
<li><a href="#microbenchmarks">Microbenchmarks</a></li>
<li><a href="#special-tests">Special tests</a></li>
</ul></li>
<li><a href="#test-results-and-summary"
id="toc-test-results-and-summary">Test results and summary</a></li>
<li><a href="#test-suite-control" id="toc-test-suite-control">Test suite
control</a>
<ul>
<li><a href="#general-keywords-test_opts"
id="toc-general-keywords-test_opts">General keywords
(TEST_OPTS)</a></li>
<li><a href="#jtreg-keywords" id="toc-jtreg-keywords">JTReg
keywords</a></li>
<li><a href="#gtest-keywords" id="toc-gtest-keywords">Gtest
keywords</a></li>
<li><a href="#microbenchmark-keywords"
id="toc-microbenchmark-keywords">Microbenchmark keywords</a></li>
<li><a href="#test-results-and-summary">Test results and summary</a></li>
<li><a href="#test-suite-control">Test suite control</a><ul>
<li><a href="#general-keywords-test_opts">General keywords (TEST_OPTS)</a></li>
<li><a href="#jtreg-keywords">JTReg keywords</a></li>
<li><a href="#gtest-keywords">Gtest keywords</a></li>
<li><a href="#microbenchmark-keywords">Microbenchmark keywords</a></li>
</ul></li>
<li><a href="#notes-for-specific-tests"
id="toc-notes-for-specific-tests">Notes for Specific Tests</a>
<ul>
<li><a href="#docker-tests" id="toc-docker-tests">Docker Tests</a></li>
<li><a href="#non-us-locale" id="toc-non-us-locale">Non-US
locale</a></li>
<li><a href="#pkcs11-tests" id="toc-pkcs11-tests">PKCS11 Tests</a></li>
<li><a href="#testing-with-alternative-security-providers"
id="toc-testing-with-alternative-security-providers">Testing with
alternative security providers</a></li>
<li><a href="#client-ui-tests" id="toc-client-ui-tests">Client UI
Tests</a></li>
<li><a href="#notes-for-specific-tests">Notes for Specific Tests</a><ul>
<li><a href="#docker-tests">Docker Tests</a></li>
<li><a href="#non-us-locale">Non-US locale</a></li>
<li><a href="#pkcs11-tests">PKCS11 Tests</a></li>
<li><a href="#client-ui-tests">Client UI Tests</a></li>
</ul></li>
<li><a href="#editing-this-document"
id="toc-editing-this-document">Editing this document</a></li>
<li><a href="#editing-this-document">Editing this document</a></li>
</ul>
</nav>
<h2 id="overview">Overview</h2>
<p>The bulk of JDK tests use <a
href="https://openjdk.org/jtreg/">jtreg</a>, a regression test framework
and test runner built for the JDK's specific needs. Other test
frameworks are also used. The different test frameworks can be executed
directly, but there is also a set of make targets intended to simplify
the interface, and figure out how to run your tests for you.</p>
<h2 id="running-tests-locally-with-make-test">Running tests locally with
<code>make test</code></h2>
<p>This is the easiest way to get started. Assuming you've built the JDK
locally, execute:</p>
<pre><code>$ make test</code></pre>
<p>This will run a default set of tests against the JDK, and present you
with the results. <code>make test</code> is part of a family of
test-related make targets which simplify running tests, because they
invoke the various test frameworks for you. The "make test framework" is
simple to start with, but more complex ad-hoc combination of tests is
also possible. You can always invoke the test frameworks directly if you
want even more control.</p>
<h2 id="using-make-test-the-run-test-framework">Using &quot;make test&quot; (the run-test framework)</h2>
<p>This new way of running tests is developer-centric. It assumes that you have built a JDK locally and want to test it. Running common test targets is simple, and more complex ad-hoc combinations of tests are possible. The user interface is forgiving, and clearly reports errors it cannot resolve.</p>
<p>Some example command-lines:</p>
<pre><code>$ make test-tier1
$ make test-jdk_lang JTREG=&quot;JOBS=8&quot;
@@ -110,215 +62,51 @@ $ make test TEST=&quot;hotspot:hotspot_gc&quot; JTREG=&quot;JOBS=1;TIMEOUT_FACTO
$ make test TEST=&quot;jtreg:test/hotspot:hotspot_gc test/hotspot/jtreg/native_sanity/JniVersion.java&quot;
$ make test TEST=&quot;micro:java.lang.reflect&quot; MICRO=&quot;FORK=1;WARMUP_ITER=2&quot;
$ make exploded-test TEST=tier2</code></pre>
<p>"tier1" and "tier2" refer to tiered testing, see further down. "TEST"
is a test selection argument which the make test framework will use to
try to find the tests you want. It iterates over the available test
frameworks, and if the test isn't present in one, it tries the next one.
The main target <code>test</code> uses the jdk-image as the tested
product. There is also an alternate target <code>exploded-test</code>
that uses the exploded image instead. Not all tests will run
successfully on the exploded image, but using this target can greatly
improve rebuild times for certain workflows.</p>
<p>Previously, <code>make test</code> was used to invoke an old system
for running tests, and <code>make run-test</code> was used for the new
test framework. For backward compatibility with scripts and muscle
memory, <code>run-test</code> and variants like
<code>exploded-run-test</code> or <code>run-test-tier1</code> are kept
as aliases.</p>
<h3 id="configuration">Configuration</h3>
<p>To be able to run JTReg tests, <code>configure</code> needs to know
where to find the JTReg test framework. If it is not picked up
automatically by configure, use the
<code>--with-jtreg=&lt;path to jtreg home&gt;</code> option to point to
the JTReg framework. Note that this option should point to the JTReg
home, i.e. the top directory, containing <code>lib/jtreg.jar</code> etc.
(An alternative is to set the <code>JT_HOME</code> environment variable
to point to the JTReg home before running <code>configure</code>.)</p>
<p>To be able to run microbenchmarks, <code>configure</code> needs to
know where to find the JMH dependency. Use
<code>--with-jmh=&lt;path to JMH jars&gt;</code> to point to a directory
containing the core JMH and transitive dependencies. The recommended
dependencies can be retrieved by running
<code>sh make/devkit/createJMHBundle.sh</code>, after which
<code>--with-jmh=build/jmh/jars</code> should work.</p>
<p>When tests fail or timeout, jtreg runs its failure handler to capture
necessary data from the system where the test was run. This data can
then be used to analyze the test failures. Collecting this data involves
running various commands (which are listed in files residing in
<code>test/failure_handler/src/share/conf</code>) and some of these
commands use <code>sudo</code>. If the system's <code>sudoers</code>
file isn't configured to allow running these commands, then it can
result in a password prompt during the failure handler execution.
Typically, when running locally, collecting this additional data isn't
always necessary. To disable running the failure handler, use
<code>--enable-jtreg-failure-handler=no</code> when running
<code>configure</code>. If, however, you want to let the failure handler
run and don't want to be prompted for the sudo password, then you can
configure your <code>sudoers</code> file appropriately. Please read the
necessary documentation of your operating system to see how to do that;
here we only show one possible way of doing that - edit the
<code>/etc/sudoers.d/sudoers</code> file to include the following
line:</p>
<pre><code>johndoe ALL=(ALL) NOPASSWD: /sbin/dmesg</code></pre>
<p>This line configures <code>sudo</code> to <em>not</em> prompt for
password for the <code>/sbin/dmesg</code> command (this is one of the
commands that is listed in the files at
<code>test/failure_handler/src/share/conf</code>), for the user
<code>johndoe</code>. Here <code>johndoe</code> is the user account
under which the jtreg tests are run. Replace the username with a
relevant user account of your system.</p>
<h2 id="test-selection">Test selection</h2>
<p>All functionality is available using the <code>test</code> make
target. In this use case, the test or tests to be executed are controlled
using the <code>TEST</code> variable. To speed up subsequent test runs
with no source code changes, <code>test-only</code> can be used instead,
which does not depend on the source and test image build.</p>
<p>For some common top-level tests, direct make targets have been
generated. This includes all JTReg test groups, the hotspot gtest, and
custom tests (if present). This means that <code>make test-tier1</code>
is equivalent to <code>make test TEST="tier1"</code>, but the latter is
more tab-completion friendly. For more complex test runs, the
<code>test TEST="x"</code> solution needs to be used.</p>
<p>The test specifications given in <code>TEST</code> are parsed into
fully qualified test descriptors, which clearly and unambiguously show
which tests will be run. As an example, <code>:tier1</code> will expand
to include all subcomponent test directories that define
<code>tier1</code>, for example:
<code>jtreg:$(TOPDIR)/test/hotspot/jtreg:tier1 jtreg:$(TOPDIR)/test/jdk:tier1 jtreg:$(TOPDIR)/test/langtools:tier1 ...</code>.
You can always submit a list of fully qualified test descriptors in the
<code>TEST</code> variable if you want to shortcut the parser.</p>
<h3 id="common-test-groups">Common Test Groups</h3>
<p>Ideally, all tests are run for every change but this may not be
practical due to the limited testing resources, the scope of the change,
etc.</p>
<p>The source tree currently defines a few common test groups in the
relevant <code>TEST.groups</code> files. There are test groups that
cover a specific component, for example <code>hotspot_gc</code>. It is a
good idea to look into the <code>TEST.groups</code> files to get a sense
of what tests are relevant to a particular JDK component.</p>
<p>Component-specific tests may miss some unintended consequences of a
change, so other tests should also be run. Again, it might be
impractical to run all tests, and therefore <em>tiered</em> test groups
exist. Tiered test groups are not component-specific, but rather cover
the significant parts of the entire JDK.</p>
<p>Multiple tiers allow balancing test coverage and testing costs. Lower
test tiers are supposed to contain the simpler, quicker and more stable
tests. Higher tiers are supposed to contain progressively more thorough,
slower, and sometimes less stable tests, or the tests that require
special configuration.</p>
<p>Contributors are expected to run the tests for the areas that are
changed, and the first N tiers they can afford to run, but at least
tier1.</p>
<p>A brief description of the tiered test groups:</p>
<ul>
<li><p><code>tier1</code>: This is the lowest test tier. Multiple
developers run these tests every day. Because of the widespread use, the
tests in <code>tier1</code> are carefully selected and optimized to run
fast, and to run in the most stable manner. The test failures in
<code>tier1</code> are usually followed up on quickly, either with
fixes, or adding relevant tests to problem list. GitHub Actions
workflows, if enabled, run <code>tier1</code> tests.</p></li>
<li><p><code>tier2</code>: This test group covers even more ground.
These contain, among other things, tests that either run for too long to
be at <code>tier1</code>, or may require special configuration, or tests
that are less stable, or cover the broader range of non-core JVM and JDK
features/components (for example, XML).</p></li>
<li><p><code>tier3</code>: This test group includes more stressful
tests, the tests for corner cases not covered by previous tiers, plus
the tests that require GUIs. As such, this suite should either be run
with low concurrency (<code>TEST_JOBS=1</code>), or without headful
tests (<code>JTREG_KEYWORDS=\!headful</code>), or both.</p></li>
<li><p><code>tier4</code>: This test group includes every other test not
covered by previous tiers. It includes, for example,
<code>vmTestbase</code> suites for Hotspot, which run for many hours
even on large machines. It also runs GUI tests, so the same
<code>TEST_JOBS</code> and <code>JTREG_KEYWORDS</code> caveats
apply.</p></li>
</ul>
<h3 id="jtreg">JTReg</h3>
<p>JTReg tests can be selected either by picking a JTReg test group, or
a selection of files or directories containing JTReg tests.
Documentation can be found at <a
href="https://openjdk.org/jtreg/">https://openjdk.org/jtreg/</a>, note
especially the extensive <a
href="https://openjdk.org/jtreg/faq.html">FAQ</a>.</p>
<p>JTReg test groups can be specified either without a test root, e.g.
<code>:tier1</code> (or <code>tier1</code>, the initial colon is
optional), or with, e.g. <code>hotspot:tier1</code>,
<code>test/jdk:jdk_util</code> or
<code>$(TOPDIR)/test/hotspot/jtreg:hotspot_all</code>. The test root can
be specified either as an absolute path, or a path relative to the JDK
top directory, or the <code>test</code> directory. For simplicity, the
hotspot JTReg test root, which really is <code>hotspot/jtreg</code>, can
be abbreviated as just <code>hotspot</code>.</p>
<p>When specified without a test root, all matching groups from all test
roots will be added. Otherwise, only the group from the specified test
root will be added.</p>
<p>Individual JTReg tests or directories containing JTReg tests can also
be specified, like
<code>test/hotspot/jtreg/native_sanity/JniVersion.java</code> or
<code>hotspot/jtreg/native_sanity</code>. Just like for test root
selection, you can either specify an absolute path (which can even point
to JTReg tests outside the source tree), or a path relative to either
the JDK top directory or the <code>test</code> directory.
<code>hotspot</code> can be used as an alias for
<code>hotspot/jtreg</code> here as well.</p>
<p>As long as the test groups or test paths can be uniquely resolved,
you do not need to enter the <code>jtreg:</code> prefix. If this is not
possible, or if you want to use a fully qualified test descriptor, add
<code>jtreg:</code>, e.g.
<code>jtreg:test/hotspot/jtreg/native_sanity</code>.</p>
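<p>For example, combining the descriptors mentioned above (paths and
group names as given earlier in this section):</p>
<pre><code>$ make test TEST=&quot;hotspot:tier1&quot;
$ make test TEST=&quot;jtreg:test/hotspot/jtreg/native_sanity&quot;</code></pre>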
<h3 id="gtest">Gtest</h3>
<p><strong>Note:</strong> To be able to run the Gtest suite, you need to
configure your build to be able to find a proper version of the gtest
source. For details, see the section <a
href="building.html#running-tests">"Running Tests" in the build
documentation</a>.</p>
<p>Since the Hotspot Gtest suite is so quick, the default is to run all
tests. This is specified by just <code>gtest</code>, or as a fully
qualified test descriptor <code>gtest:all</code>.</p>
<p>If you want, you can single out an individual test or a group of
tests, for instance <code>gtest:LogDecorations</code> or
<code>gtest:LogDecorations.level_test_vm</code>. This can be
particularly useful if you want to run a shaky test repeatedly.</p>
<p>For Gtest, there is a separate test suite for each JVM variant. The
JVM variant is defined by adding <code>/&lt;variant&gt;</code> to the
test descriptor, e.g. <code>gtest:Log/client</code>. If you specify no
variant, gtest will run once for each JVM variant present (e.g. server,
client). So if you only have the server JVM present, then
<code>gtest:all</code> will be equivalent to
<code>gtest:all/server</code>.</p>
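<p>As an illustration, the following commands run the complete Gtest
suite, and a single test class restricted to the server variant (the
test name is reused from the examples above):</p>
<pre><code>$ make test TEST=gtest:all
$ make test TEST=gtest:LogDecorations/server</code></pre>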
<h3 id="microbenchmarks">Microbenchmarks</h3>
<p>Which microbenchmarks to run is selected using a regular expression
following the <code>micro:</code> test descriptor, e.g.,
<code>micro:java.lang.reflect</code>. This delegates the test selection
to JMH, meaning package name, class name and even benchmark method names
can be used to select tests.</p>
<p>Using special characters like <code>|</code> in the regular
expression is possible, but needs to be escaped multiple times:
<code>micro:ArrayCopy\\\\\|reflect</code>.</p>
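<p>For example, to run all benchmarks in the reflection package
mentioned above:</p>
<pre><code>$ make test TEST=&quot;micro:java.lang.reflect&quot;</code></pre>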
<h3 id="special-tests">Special tests</h3>
<p>A handful of odd tests that are not covered by any other testing
framework are accessible using the <code>special:</code> test
descriptor. Currently, this includes <code>failure-handler</code> and
<code>make</code>.</p>
<ul>
<li><p>Failure handler testing is run using
<code>special:failure-handler</code> or just
<code>failure-handler</code> as test descriptor.</p></li>
<li><p>Tests for the build system, including both makefiles and related
functionality, are run using <code>special:make</code> or just
<code>make</code> as test descriptor. This is equivalent to
<code>special:make:all</code>.</p>
<p>A specific make test can be run by supplying it as argument, e.g.
<code>special:make:idea</code>. As a special syntax, this can also be
expressed as <code>make-idea</code>, which allows for command lines as
<code>make test-make-idea</code>.</p></li>
</ul>
<h2 id="test-results-and-summary">Test results and summary</h2>
<p>At the end of the test run, a summary of all tests run will be
presented. This will have a consistent look, regardless of what test
suites were used. This is a sample summary:</p>
<pre><code>==============================
Test summary
==============================
   TEST                          TOTAL  PASS  FAIL ERROR
jtreg:nashorn/test:tier1 133 133 0 0
==============================
TEST FAILURE</code></pre>
<p>Tests where the number of TOTAL tests does not equal the number of
PASSed tests will be considered a test failure. These are marked with
the <code>&gt;&gt; ... &lt;&lt;</code> marker for easy
identification.</p>
<p>The classification of non-passed tests differs a bit between test
suites. In the summary, ERROR is used as a catch-all for tests that
neither passed nor are classified as failed by the framework. This might
indicate a test framework error, a timeout or other problems.</p>
<p>In case of test failures, <code>make test</code> will exit with a
non-zero exit value.</p>
<p>All tests have their result stored in
<code>build/$BUILD/test-results/$TEST_ID</code>, where TEST_ID is a
path-safe conversion from the fully qualified test descriptor, e.g. for
<code>jtreg:jdk/test:tier1</code> the TEST_ID is
<code>jtreg_jdk_test_tier1</code>. This path is also printed in the log
at the end of the test run.</p>
<p>Additional work data is stored in
<code>build/$BUILD/test-support/$TEST_ID</code>. For some frameworks,
this directory might contain information that is useful in determining
the cause of a failed test.</p>
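<p>For instance, after a <code>jtreg:jdk/test:tier1</code> run you could
inspect these directories like this (the actual <code>$BUILD</code> name
depends on your configuration):</p>
<pre><code>$ ls build/$BUILD/test-results/jtreg_jdk_test_tier1
$ ls build/$BUILD/test-support/jtreg_jdk_test_tier1</code></pre>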
<h2 id="test-suite-control">Test suite control</h2>
<p>It is possible to control various aspects of the test suites using
make control variables.</p>
<p>These variables use a keyword=value approach to allow multiple values
to be set. So, for instance,
<code>JTREG="JOBS=1;TIMEOUT_FACTOR=8"</code> will set the JTReg
concurrency level to 1 and the timeout factor to 8. This is equivalent
to setting <code>JTREG_JOBS=1 JTREG_TIMEOUT_FACTOR=8</code>, but using
the keyword format means that the <code>JTREG</code> variable is parsed
and verified for correctness, so <code>JTREG="TMIEOUT_FACTOR=8"</code>
would give an error, while <code>JTREG_TMIEOUT_FACTOR=8</code> would
just pass unnoticed.</p>
<p>To separate multiple keyword=value pairs, use <code>;</code>
(semicolon). Since the shell normally eats <code>;</code>, the
recommended usage is to write the assignment inside quotes, e.g.
<code>JTREG="...;..."</code>. This will also make sure spaces are
preserved, as in
<code>JTREG="JAVA_OPTIONS=-XshowSettings -Xlog:gc+ref=debug"</code>.</p>
<p>(Other ways are possible, e.g. using backslash:
<code>JTREG=JOBS=1\;TIMEOUT_FACTOR=8</code>. Also, as a special
technique, the string <code>%20</code> will be replaced with space for
certain options, e.g.
<code>JTREG=JAVA_OPTIONS=-XshowSettings%20-Xlog:gc+ref=debug</code>.
This can be useful if you have layers of scripts and have trouble
getting proper quoting of command line arguments through.)</p>
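<p>For example, the two command lines below pass the same extra Java
options to JTReg, first using quoting and then using the
<code>%20</code> technique (the <code>tier1</code> selection is just an
illustration):</p>
<pre><code>$ make test TEST=tier1 JTREG=&quot;JAVA_OPTIONS=-XshowSettings -Xlog:gc+ref=debug&quot;
$ make test TEST=tier1 JTREG=JAVA_OPTIONS=-XshowSettings%20-Xlog:gc+ref=debug</code></pre>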
<p>As far as possible, the names of the keywords have been standardized
between test suites.</p>
<h3 id="general-keywords-test_opts">General keywords (TEST_OPTS)</h3>
<p>Some keywords are valid across different test suites. If you want to
run tests from multiple test suites, or just don't want to bother about
which test-suite-specific control variable to use, you can use the
general TEST_OPTS control variable.</p>
<p>There are also some keywords that apply globally to the test runner
system, not to any specific test suites. These are also available as
TEST_OPTS keywords.</p>
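<p>A minimal sketch, assuming you want to lower the concurrency and
raise the timeout factor regardless of which test suites end up
running:</p>
<pre><code>$ make test TEST=tier1 TEST_OPTS=&quot;JOBS=1;TIMEOUT_FACTOR=8&quot;</code></pre>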
<h4 id="jobs">JOBS</h4>
<p>Currently only applies to JTReg.</p>
<h4 id="timeout_factor">TIMEOUT_FACTOR</h4>
<p>Currently only applies to JTReg.</p>
<h4 id="java_options">JAVA_OPTIONS</h4>
<p>Applies to JTReg, GTest and Micro.</p>
<h4 id="vm_options">VM_OPTIONS</h4>
<p>Applies to JTReg, GTest and Micro.</p>
<h4 id="aot_modules">AOT_MODULES</h4>
<p>Applies to JTReg and GTest.</p>
<h4 id="jcov">JCOV</h4>
<p>This keyword applies globally to the test runner system. If set to
<code>true</code>, it enables JCov coverage reporting for all tests run.
To be useful, the JDK under test must be run with a JDK built with JCov
instrumentation
(<code>configure --with-jcov=&lt;path to directory containing lib/jcov.jar&gt;</code>,
<code>make jcov-image</code>).</p>
<p>The simplest way to run tests with JCov coverage report is to use the
special target <code>jcov-test</code> instead of <code>test</code>, e.g.
<code>make jcov-test TEST=jdk_lang</code>. This will make sure the JCov
image is built, and that JCov reporting is enabled.</p>
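<p>A typical sequence could look like the following (the JCov path is
illustrative):</p>
<pre><code>$ configure --with-jcov=/path/to/jcov
$ make jcov-test TEST=jdk_lang</code></pre>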
<p>The JCov report is stored in
<code>build/$BUILD/test-results/jcov-output/report</code>.</p>
<p>Please note that running with JCov reporting can be very memory
intensive.</p>
<h4 id="jcov_diff_changeset">JCOV_DIFF_CHANGESET</h4>
<p>While collecting code coverage with JCov, it is also possible to find
coverage for only recently changed code. JCOV_DIFF_CHANGESET specifies a
source revision. A textual report will be generated showing coverage of
the diff between the specified revision and the repository tip.</p>
<p>The report is stored in the
<code>build/$BUILD/test-results/jcov-output/diff_coverage_report</code>
file.</p>
<h3 id="jtreg-keywords">JTReg keywords</h3>
<h4 id="jobs-1">JOBS</h4>
<p>The test concurrency (<code>-concurrency</code>).</p>
<p>Defaults to TEST_JOBS (if set by <code>--with-test-jobs=</code>),
otherwise it defaults to JOBS, except for Hotspot, where the default is
<em>number of CPU cores/2</em>, but never more than <em>memory size in
GB/2</em>.</p>
<h4 id="timeout_factor-1">TIMEOUT_FACTOR</h4>
<p>The timeout factor (<code>-timeoutFactor</code>).</p>
<p>Defaults to 4.</p>
<h4 id="failure_handler_timeout">FAILURE_HANDLER_TIMEOUT</h4>
<p>Sets the argument <code>-timeoutHandlerTimeout</code> for JTReg. The
default value is 0. This is only valid if the failure handler is
built.</p>
<h4 id="jtreg_test_thread_factory">JTREG_TEST_THREAD_FACTORY</h4>
<p>Sets the <code>-testThreadFactory</code> for JTReg. It should be the
fully qualified classname of a class which implements
<code>java.util.concurrent.ThreadFactory</code>. One such implementation
class, named Virtual, is currently part of the JDK build in the
<code>test/jtreg_test_thread_factory/</code> directory. This class gets
compiled during the test image build. The implementation of the Virtual
class creates a new virtual thread for executing each test class.</p>
<h4 id="test_mode">TEST_MODE</h4>
<p>The test mode (<code>agentvm</code> or <code>othervm</code>).</p>
<p>Defaults to <code>agentvm</code>.</p>
<h4 id="assert">ASSERT</h4>
<p>Enable asserts (<code>-ea -esa</code>, or none).</p>
<p>Set to <code>true</code> or <code>false</code>. If true, adds
<code>-ea -esa</code>. Defaults to true, except for hotspot.</p>
<h4 id="verbose">VERBOSE</h4>
<p>The verbosity level (<code>-verbose</code>).</p>
<p>Defaults to <code>fail,error,summary</code>.</p>
<h4 id="retain">RETAIN</h4>
<p>What test data to retain (<code>-retain</code>).</p>
<p>Defaults to <code>fail,error</code>.</p>
<h4 id="max_mem">MAX_MEM</h4>
<p>Limit memory consumption (<code>-Xmx</code> and
<code>-vmoption:-Xmx</code>, or none).</p>
<p>Limit memory consumption for JTReg test framework and VM under test.
Set to 0 to disable the limits.</p>
<p>Defaults to 512m, except for hotspot, where it defaults to 0 (no
limit).</p>
<h4 id="max_output">MAX_OUTPUT</h4>
<p>Set the property <code>javatest.maxOutputSize</code> for the
launcher, to change the default JTReg log limit.</p>
<h4 id="keywords">KEYWORDS</h4>
<p>JTReg keywords sent to JTReg using <code>-k</code>. Please be careful
in making sure that spaces and special characters (like <code>!</code>)
are properly quoted. To avoid some issues, the special value
<code>%20</code> can be used instead of space.</p>
<h4 id="extra_problem_lists">EXTRA_PROBLEM_LISTS</h4>
<p>Use one or more additional problem list files, in addition to the
default ProblemList.txt located at the JTReg test roots.</p>
<p>If multiple file names are specified, they should be separated by
space (or, to help avoid quoting issues, the special value
<code>%20</code>).</p>
<p>The file names should be either absolute, or relative to the JTReg
test root of the tests to be run.</p>
<h4 id="run_problem_lists">RUN_PROBLEM_LISTS</h4>
<p>Use the problem lists to select tests instead of excluding them.</p>
<p>Set to <code>true</code> or <code>false</code>. If <code>true</code>,
JTReg will use the <code>-match:</code> option, otherwise
<code>-exclude:</code> will be used. Default is <code>false</code>.</p>
<h4 id="options">OPTIONS</h4>
<p>Additional options to the JTReg test framework.</p>
<p>Use <code>JTREG="OPTIONS=--help all"</code> to see all available
JTReg options.</p>
<h4 id="java_options-1">JAVA_OPTIONS</h4>
<p>Additional Java options for running test classes (sent to JTReg as
<code>-javaoption</code>).</p>
<h4 id="vm_options-1">VM_OPTIONS</h4>
<p>Additional Java options to be used when compiling and running classes
(sent to JTReg as <code>-vmoption</code>).</p>
<p>This option is only needed in special circumstances. To pass Java
options to your test classes, use <code>JAVA_OPTIONS</code>.</p>
<h4 id="launcher_options">LAUNCHER_OPTIONS</h4>
<p>Additional Java options that are sent to the java launcher that
starts the JTReg harness.</p>
<h4 id="aot_modules-1">AOT_MODULES</h4>
<p>Generate AOT modules before testing for the specified module, or set
of modules. If multiple modules are specified, they should be separated
by space (or, to help avoid quoting issues, the special value
<code>%20</code>).</p>
<h4 id="retry_count">RETRY_COUNT</h4>
<p>Retry failed tests up to a set number of times, until they pass. This
allows tests with intermittent failures to pass. Defaults to 0.</p>
<h4 id="repeat_count">REPEAT_COUNT</h4>
<p>Repeat the tests up to a set number of times, stopping at first
failure. This helps to reproduce intermittent test failures. Defaults to
0.</p>
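<p>For instance, to try to reproduce an intermittent failure in a single
test (the test name is illustrative), the test can be repeated like
this:</p>
<pre><code>$ make test TEST=&quot;jtreg:test/hotspot/jtreg/native_sanity/JniVersion.java&quot; \
    JTREG=&quot;REPEAT_COUNT=10&quot;</code></pre>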
<h4 id="report">REPORT</h4>
<p>Use this report style when reporting test results (sent to JTReg as
<code>-report</code>). Defaults to <code>files</code>.</p>
<h3 id="gtest-keywords">Gtest keywords</h3>
<h4 id="repeat">REPEAT</h4>
<p>The number of times to repeat the tests
(<code>--gtest_repeat</code>).</p>
<p>Default is 1. Set to -1 to repeat indefinitely. This can be
especially useful combined with
<code>OPTIONS=--gtest_break_on_failure</code> to reproduce an
intermittent problem.</p>
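<p>For example, to keep re-running the Gtest suite until a failure is
hit and then stop, the keywords above can be combined like this:</p>
<pre><code>$ make test TEST=gtest:all GTEST=&quot;REPEAT=-1;OPTIONS=--gtest_break_on_failure&quot;</code></pre>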
<h4 id="options-1">OPTIONS</h4>
<p>Additional options to the Gtest test framework.</p>
<p>Use <code>GTEST="OPTIONS=--help"</code> to see all available Gtest
options.</p>
<h4 id="aot_modules-2">AOT_MODULES</h4>
<p>Generate AOT modules before testing for the specified module, or set
of modules. If multiple modules are specified, they should be separated
by space (or, to help avoid quoting issues, the special value
<code>%20</code>).</p>
<h3 id="microbenchmark-keywords">Microbenchmark keywords</h3>
<h4 id="fork">FORK</h4>
<p>Override the number of benchmark forks to spawn. Same as specifying
<code>-f &lt;num&gt;</code>.</p>
<h4 id="iter">ITER</h4>
<p>Number of measurement iterations per fork. Same as specifying
<code>-i &lt;num&gt;</code>.</p>
<h4 id="time">TIME</h4>
<p>Amount of time to spend in each measurement iteration, in seconds.
Same as specifying <code>-r &lt;num&gt;</code>.</p>
<h4 id="warmup_iter">WARMUP_ITER</h4>
<p>Number of warmup iterations to run before the measurement phase in
each fork. Same as specifying <code>-wi &lt;num&gt;</code>.</p>
<h4 id="warmup_time">WARMUP_TIME</h4>
<p>Amount of time to spend in each warmup iteration. Same as specifying
<code>-w &lt;num&gt;</code>.</p>
<h4 id="results_format">RESULTS_FORMAT</h4>
<p>Specify to have the test run save a log of the values. Accepts the
same values as <code>-rff</code>, i.e., <code>text</code>,
<code>csv</code>, <code>scsv</code>, <code>json</code>, or
<code>latex</code>.</p>
<h4 id="vm_options-2">VM_OPTIONS</h4>
<p>Additional VM arguments to provide to forked off VMs. Same as
<code>-jvmArgs &lt;args&gt;</code>.</p>
<h4 id="options-2">OPTIONS</h4>
<p>Additional arguments to send to JMH.</p>
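<p>As an example, the following run limits the selected benchmarks to a
single fork with shorter warmup and measurement phases (the values are
illustrative):</p>
<pre><code>$ make test TEST=&quot;micro:java.lang.reflect&quot; MICRO=&quot;FORK=1;WARMUP_ITER=2;ITER=3&quot;</code></pre>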
<h2 id="notes-for-specific-tests">Notes for Specific Tests</h2>
<h3 id="docker-tests">Docker Tests</h3>
<p>Docker tests with default parameters may fail on systems with glibc
versions not compatible with the one used in the default docker image
(e.g., Oracle Linux 7.6 for x86). For example, they pass on Ubuntu 16.04
but fail on Ubuntu 18.04 if run like this on x86:</p>
<pre><code>$ make test TEST=&quot;jtreg:test/hotspot/jtreg/containers/docker&quot;</code></pre>
<p>To run these tests correctly on Ubuntu 18.04, additional parameters
specifying the correct docker image must be passed using
<code>JAVA_OPTIONS</code>.</p>
<pre><code>$ make test TEST=&quot;jtreg:test/hotspot/jtreg/containers/docker&quot; \
JTREG=&quot;JAVA_OPTIONS=-Djdk.test.docker.image.name=ubuntu
-Djdk.test.docker.image.version=latest&quot;</code></pre>
<h3 id="non-us-locale">Non-US locale</h3>
<p>If your locale is non-US, some tests are likely to fail. To work
around this you can set the locale to US. On Unix platforms simply
setting <code>LANG="en_US"</code> in the environment before running
tests should work. On Windows or MacOS, setting
<code>JTREG="VM_OPTIONS=-Duser.language=en -Duser.country=US"</code>
helps for most, but not all test cases.</p>
<p>For example:</p>
<pre><code>$ export LANG=&quot;en_US&quot; &amp;&amp; make test TEST=...
$ make test JTREG=&quot;VM_OPTIONS=-Duser.language=en -Duser.country=US&quot; TEST=...</code></pre>
<h3 id="pkcs11-tests">PKCS11 Tests</h3>
<p>It is highly recommended to use the latest NSS version when running
PKCS11 tests. An improper NSS version may lead to unexpected failures
which are hard to diagnose. For example,
sun/security/pkcs11/Secmod/AddTrustedCert.java may fail on Ubuntu 18.04
with the default NSS version in the system. To run these tests
correctly, the system property
<code>jdk.test.lib.artifacts.&lt;NAME&gt;</code> is required on Ubuntu
18.04 to specify the alternative NSS lib directory. The
<code>&lt;NAME&gt;</code> component should be replaced with the name
element of the appropriate <code>@Artifact</code> class. (See
<code>test/jdk/sun/security/pkcs11/PKCS11Test.java</code>)</p>
<p>For example:</p>
<pre><code>$ make test TEST=&quot;jtreg:sun/security/pkcs11/Secmod/AddTrustedCert.java&quot; \
JTREG=&quot;JAVA_OPTIONS=-Djdk.test.lib.artifacts.nsslib-linux_aarch64=/path/to/NSS-libs&quot;</code></pre>
<p>For more notes about the PKCS11 tests, please refer to
test/jdk/sun/security/pkcs11/README.</p>
<h3 id="testing-with-alternative-security-providers">Testing with
alternative security providers</h3>
<p>Some security tests use a hardcoded provider for
<code>KeyFactory</code>, <code>Cipher</code>,
<code>KeyPairGenerator</code>, <code>KeyGenerator</code>,
<code>AlgorithmParameterGenerator</code>, <code>KeyAgreement</code>,
<code>Mac</code>, <code>MessageDigest</code>, <code>SecureRandom</code>,
<code>Signature</code>, <code>AlgorithmParameters</code>,
<code>Configuration</code>, <code>Policy</code>, or
<code>SecretKeyFactory</code> objects. Specify the
<code>-Dtest.provider.name=NAME</code> property to use a different
provider for the service(s).</p>
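<p>A sketch of how this might be combined with JTReg; both the test
selection and the provider name are hypothetical, so substitute the
tests and provider you actually want to exercise:</p>
<pre><code>$ make test TEST=&quot;jtreg:test/jdk/sun/security&quot; \
    JTREG=&quot;JAVA_OPTIONS=-Dtest.provider.name=SunJCE&quot;</code></pre>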
<h3 id="client-ui-tests">Client UI Tests</h3>
<h4 id="system-key-shortcuts">System key shortcuts</h4>
<p>Some Client UI tests use key sequences which may be reserved by the
operating system. Usually that causes the test to fail, so it is highly
recommended to disable system key shortcuts prior to testing. The steps
to access and disable system key shortcuts for various platforms are
provided below.</p>
<h5 id="macos">macOS</h5>
<p>Choose Apple menu; System Preferences, click Keyboard, then click
Shortcuts; select or deselect desired shortcut.</p>
<p>For example,
test/jdk/javax/swing/TooltipManager/JMenuItemToolTipKeyBindingsTest/JMenuItemToolTipKeyBindingsTest.java
fails on macOS because it uses the <code>CTRL + F1</code> key sequence
to show or hide the tooltip message, but that key combination is
reserved by the operating system. To run the test correctly, the default
global key shortcut should be disabled using the steps described above:
deselect the "Turn keyboard access on or off" option, which is
responsible for the <code>CTRL + F1</code> combination.</p>
<h5 id="linux">Linux</h5>
<p>Open the Activities overview and start typing Settings; Choose
Settings, click Devices, then click Keyboard; set or override desired
shortcut.</p>
<h5 id="windows">Windows</h5>
<p>Type <code>gpedit</code> in the Search and then click Edit group
policy; navigate to User Configuration -&gt; Administrative Templates
-&gt; Windows Components -&gt; File Explorer; in the right-side pane
look for "Turn off Windows key hotkeys" and double click on it; enable
or disable hotkeys.</p>
<p>Note: restart is required to make the settings take effect.</p>
<h4 id="robot-api">Robot API</h4>
<p>Most automated Client UI tests use the <code>Robot</code> API to
control the UI. Usually, the default operating system settings need to
be adjusted for Robot to work correctly. The detailed steps for
accessing and updating these settings on different platforms are
provided below.</p>
<h5 id="macos-1">macOS</h5>
<p><code>Robot</code> is not permitted to control your Mac by default
since macOS 10.15. To allow it, choose Apple menu -&gt; System Settings,
click Privacy &amp; Security; then click Accessibility and ensure the
following apps are allowed to control your computer: <em>Java</em> and
<em>Terminal</em>. If the tests are run from an IDE, the IDE should be
granted this permission too.</p>
<h5 id="windows-1">Windows</h5>
<p>On Windows, if a Cygwin terminal is used to run the tests, there is a
delay in focus transfer which usually causes automated UI test failures.
To disable the delay, type <code>regedit</code> in the Search and then
select Registry Editor; navigate to the following key:
<code>HKEY_CURRENT_USER\Control Panel\Desktop</code>; make sure the
<code>ForegroundLockTimeout</code> value is set to 0.</p>
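<p>As an alternative to the Registry Editor steps, the same value can be
set from a command prompt; the exact command below is an assumption, so
verify it against your environment before use:</p>
<pre><code>&gt; reg add &quot;HKEY_CURRENT_USER\Control Panel\Desktop&quot; /v ForegroundLockTimeout /t REG_DWORD /d 0 /f</code></pre>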
<p>Additional information about Client UI tests configuration for
various operating systems can be obtained at <a
href="https://wiki.openjdk.org/display/ClientLibs/Automated+client+GUI+testing+system+set+up+requirements">Automated
client GUI testing system set up requirements</a>.</p>
<h2 id="editing-this-document">Editing this document</h2>
<p>If you want to contribute changes to this document, edit
<code>doc/testing.md</code> and then run
<code>make update-build-docs</code> to generate the same changes in
<code>doc/testing.html</code>.</p>
</body>
</html>

View File

@@ -1,26 +1,21 @@
% Testing the JDK
## Overview
## Using "make test" (the run-test framework)
The bulk of JDK tests use [jtreg](https://openjdk.org/jtreg/), a regression
test framework and test runner built for the JDK's specific needs. Other test
frameworks are also used. The different test frameworks can be executed
directly, but there is also a set of make targets intended to simplify the
interface, and figure out how to run your tests for you.
This new way of running tests is developer-centric. It assumes that you have
built a JDK locally and want to test it. Running common test targets is simple,
and more complex ad-hoc combination of tests is possible. The user interface is
forgiving, and clearly report errors it cannot resolve.
## Running tests locally with `make test`
The main target `test` uses the jdk-image as the tested product. There is
also an alternate target `exploded-test` that uses the exploded image
instead. Not all tests will run successfully on the exploded image, but using
this target can greatly improve rebuild times for certain workflows.
This is the easiest way to get started. Assuming you've built the JDK locally,
execute:
$ make test
This will run a default set of tests against the JDK, and present you with the
results. `make test` is part of a family of test-related make targets which
simplify running tests, because they invoke the various test frameworks for
you. The "make test framework" is simple to start with, but more complex
ad-hoc combination of tests is also possible. You can always invoke the test
frameworks directly if you want even more control.
Previously, `make test` was used to invoke an old system for running tests, and
`make run-test` was used for the new test framework. For backward compatibility
with scripts and muscle memory, `run-test` (and variants like
`exploded-run-test` or `run-test-tier1`) are kept as aliases.
Some example command-lines:
@@ -33,20 +28,6 @@ Some example command-lines:
$ make test TEST="micro:java.lang.reflect" MICRO="FORK=1;WARMUP_ITER=2"
$ make exploded-test TEST=tier2
"tier1" and "tier2" refer to tiered testing, see further down. "TEST" is a
test selection argument which the make test framework will use to try to
find the tests you want. It iterates over the available test frameworks, and
if the test isn't present in one, it tries the next one. The main target
`test` uses the jdk-image as the tested product. There is also an alternate
target `exploded-test` that uses the exploded image instead. Not all tests
will run successfully on the exploded image, but using this target can
greatly improve rebuild times for certain workflows.
Previously, `make test` was used to invoke an old system for running tests,
and `make run-test` was used for the new test framework. For backward
compatibility with scripts and muscle memory, `run-test` and variants like
`exploded-run-test` or `run-test-tier1` are kept as aliases.
### Configuration
To be able to run JTReg tests, `configure` needs to know where to find the
@@ -102,61 +83,57 @@ test runs, the `test TEST="x"` solution needs to be used.
The test specifications given in `TEST` is parsed into fully qualified test
descriptors, which clearly and unambigously show which tests will be run. As an
example, `:tier1` will expand to include all subcomponent test directories
that define `tier1`, for example: `jtreg:$(TOPDIR)/test/hotspot/jtreg:tier1
jtreg:$(TOPDIR)/test/jdk:tier1 jtreg:$(TOPDIR)/test/langtools:tier1 ...`. You
can always submit a list of fully qualified test descriptors in the `TEST`
variable if you want to shortcut the parser.
example, `:tier1` will expand to `jtreg:$(TOPDIR)/test/hotspot/jtreg:tier1
jtreg:$(TOPDIR)/test/jdk:tier1 jtreg:$(TOPDIR)/test/langtools:tier1
jtreg:$(TOPDIR)/test/nashorn:tier1 jtreg:$(TOPDIR)/test/jaxp:tier1`. You can
always submit a list of fully qualified test descriptors in the `TEST` variable
if you want to shortcut the parser.
### Common Test Groups
Ideally, all tests are run for every change but this may not be practical due
to the limited testing resources, the scope of the change, etc.
Ideally, all tests are run for every change but this may not be practical due to the limited
testing resources, the scope of the change, etc.
The source tree currently defines a few common test groups in the relevant
`TEST.groups` files. There are test groups that cover a specific component,
for example `hotspot_gc`. It is a good idea to look into `TEST.groups` files
to get a sense what tests are relevant to a particular JDK component.
The source tree currently defines a few common test groups in the relevant `TEST.groups`
files. There are test groups that cover a specific component, for example `hotspot_gc`.
It is a good idea to look into `TEST.groups` files to get a sense what tests are relevant
to a particular JDK component.
Component-specific tests may miss some unintended consequences of a change, so
other tests should also be run. Again, it might be impractical to run all
tests, and therefore
_tiered_ test groups exist. Tiered test groups are not component-specific, but
rather cover the significant parts of the entire JDK.
Component-specific tests may miss some unintended consequences of a change, so other
tests should also be run. Again, it might be impractical to run all tests, and therefore
_tiered_ test groups exist. Tiered test groups are not component-specific, but rather cover
the significant parts of the entire JDK.
Multiple tiers allow balancing test coverage and testing costs. Lower test
tiers are supposed to contain the simpler, quicker and more stable tests.
Higher tiers are supposed to contain progressively more thorough, slower, and
sometimes less stable tests, or the tests that require special
configuration.
Multiple tiers allow balancing test coverage and testing costs. Lower test tiers are supposed to
contain the simpler, quicker and more stable tests. Higher tiers are supposed to contain
progressively more thorough, slower, and sometimes less stable tests, or the tests that require
special configuration.
Contributors are expected to run the tests for the areas that are changed, and
the first N tiers they can afford to run, but at least tier1.
Contributors are expected to run the tests for the areas that are changed, and the first N tiers
they can afford to run, but at least tier1.
A brief description of the tiered test groups:
- `tier1`: This is the lowest test tier. Multiple developers run these tests
every day. Because of the widespread use, the tests in `tier1` are
carefully selected and optimized to run fast, and to run in the most stable
manner. The test failures in `tier1` are usually followed up on quickly,
either with fixes, or adding relevant tests to problem list. GitHub Actions
workflows, if enabled, run `tier1` tests.
- `tier1`: This is the lowest test tier. Multiple developers run these tests every day.
Because of the widespread use, the tests in `tier1` are carefully selected and optimized to run
fast, and to run in the most stable manner. The test failures in `tier1` are usually followed up
on quickly, either with fixes, or adding relevant tests to problem list. GitHub Actions workflows,
if enabled, run `tier1` tests.
- `tier2`: This test group covers even more ground. These contain, among other
things, tests that either run for too long to be at `tier1`, or may require
special configuration, or tests that are less stable, or cover the broader
range of non-core JVM and JDK features/components(for example, XML).
- `tier2`: This test group covers even more ground. These contain, among other things,
tests that either run for too long to be at `tier1`, or may require special configuration,
or tests that are less stable, or cover the broader range of non-core JVM and JDK features/components
(for example, XML).
- `tier3`: This test group includes more stressful tests, the tests for corner
cases not covered by previous tiers, plus the tests that require GUIs. As
such, this suite should either be run with low concurrency
(`TEST_JOBS=1`), or without headful tests(`JTREG_KEYWORDS=\!headful`), or
both.
- `tier3`: This test group includes more stressful tests, the tests for corner cases
not covered by previous tiers, plus the tests that require GUIs. As such, this suite
should either be run with low concurrency (`TEST_JOBS=1`), or without headful tests
(`JTREG_KEYWORDS=\!headful`), or both.
- `tier4`: This test group includes every other test not covered by previous
tiers. It includes, for example, `vmTestbase` suites for Hotspot, which run
for many hours even on large machines. It also runs GUI tests, so the same
`TEST_JOBS` and `JTREG_KEYWORDS` caveats apply.
- `tier4`: This test group includes every other test not covered by previous tiers. It includes,
for example, `vmTestbase` suites for Hotspot, which run for many hours even on large
machines. It also runs GUI tests, so the same `TEST_JOBS` and `JTREG_KEYWORDS` caveats
apply.
### JTReg
@@ -192,11 +169,6 @@ use a fully qualified test descriptor, add `jtreg:`, e.g.
### Gtest
**Note:** To be able to run the Gtest suite, you need to configure your build to
be able to find a proper version of the gtest source. For details, see the
section ["Running Tests" in the build
documentation](building.html#running-tests).
Since the Hotspot Gtest suite is so quick, the default is to run all tests.
This is specified by just `gtest`, or as a fully qualified test descriptor
`gtest:all`.
@@ -378,15 +350,6 @@ Defaults to 4.
Sets the argument `-timeoutHandlerTimeout` for JTReg. The default value is 0.
This is only valid if the failure handler is built.
#### JTREG_TEST_THREAD_FACTORY
Sets the `-testThreadFactory` for JTReg. It should be the fully qualified classname
of a class which implements `java.util.concurrent.ThreadFactory`.
One such implementation class, named Virtual, is currently part of the JDK build
in the `test/jtreg_test_thread_factory/` directory. This class gets compiled during
the test image build. The implementation of the Virtual class creates a new virtual
thread for executing each test class.
#### TEST_MODE
The test mode (`agentvm` or `othervm`).
@@ -493,11 +456,6 @@ Repeat the tests up to a set number of times, stopping at first failure.
This helps to reproduce intermittent test failures.
Defaults to 0.
#### REPORT
Use this report style when reporting test results (sent to JTReg as `-report`).
Defaults to `files`.
### Gtest keywords
#### REPEAT
@@ -600,40 +558,27 @@ It is highly recommended to use the latest NSS version when running PKCS11
tests. Improper NSS version may lead to unexpected failures which are hard to
diagnose. For example, sun/security/pkcs11/Secmod/AddTrustedCert.java may fail
on Ubuntu 18.04 with the default NSS version in the system. To run these tests
correctly, the system property `jdk.test.lib.artifacts.<NAME>` is required on
Ubuntu 18.04 to specify the alternative NSS lib directory. The `<NAME>`
component should be replaced with the name element of the appropriate
`@Artifact` class. (See `test/jdk/sun/security/pkcs11/PKCS11Test.java`)
correctly, the system property `test.nss.lib.paths` is required on Ubuntu 18.04
to specify the alternative NSS lib directories.
For example:
```
$ make test TEST="jtreg:sun/security/pkcs11/Secmod/AddTrustedCert.java" \
JTREG="JAVA_OPTIONS=-Djdk.test.lib.artifacts.nsslib-linux_aarch64=/path/to/NSS-libs"
JTREG="JAVA_OPTIONS=-Dtest.nss.lib.paths=/path/to/your/latest/NSS-libs"
```
For more notes about the PKCS11 tests, please refer to
test/jdk/sun/security/pkcs11/README.
### Testing with alternative security providers
Some security tests use a hardcoded provider for `KeyFactory`, `Cipher`,
`KeyPairGenerator`, `KeyGenerator`, `AlgorithmParameterGenerator`,
`KeyAgreement`, `Mac`, `MessageDigest`, `SecureRandom`, `Signature`,
`AlgorithmParameters`, `Configuration`, `Policy`, or `SecretKeyFactory` objects.
Specify the `-Dtest.provider.name=NAME` property to use a different provider for
the service(s).
### Client UI Tests
#### System key shortcuts
Some Client UI tests use key sequences which may be reserved by the operating
system. Usually that causes the test failure. So it is highly recommended to
disable system key shortcuts prior testing. The steps to access and disable
system key shortcuts for various platforms are provided below.
##### macOS
#### MacOS
Choose Apple menu; System Preferences, click Keyboard, then click Shortcuts;
select or deselect desired shortcut.
@@ -646,12 +591,12 @@ test correctly the default global key shortcut should be disabled using the
steps described above, and then deselect "Turn keyboard access on or off"
option which is responsible for `CTRL + F1` combination.
##### Linux
#### Linux
Open the Activities overview and start typing Settings; Choose Settings, click
Devices, then click Keyboard; set or override desired shortcut.
##### Windows
#### Windows
Type `gpedit` in the Search and then click Edit group policy; navigate to User
Configuration -> Administrative Templates -> Windows Components -> File
@@ -660,33 +605,6 @@ double click on it; enable or disable hotkeys.
Note: restart is required to make the settings take effect.
#### Robot API
Most automated Client UI tests use `Robot` API to control the UI. Usually,
the default operating system settings need to be adjusted for Robot
to work correctly. The detailed steps how to access and update these settings
for different platforms are provided below.
##### macOS
`Robot` is not permitted to control your Mac by default since
macOS 10.15. To allow it, choose Apple menu -> System Settings, click
Privacy & Security; then click Accessibility and ensure the following apps are
allowed to control your computer: *Java* and *Terminal*. If the tests are run
from an IDE, the IDE should be granted this permission too.
##### Windows
On Windows if Cygwin terminal is used to run the tests, there is a delay in
focus transfer. Usually it causes automated UI test failure. To disable the
delay, type `regedit` in the Search and then select Registry Editor; navigate
to the following key: `HKEY_CURRENT_USER\Control Panel\Desktop`; make sure
the `ForegroundLockTimeout` value is set to 0.
Additional information about Client UI tests configuration for various operating
systems can be obtained at [Automated client GUI testing system set up
requirements](https://wiki.openjdk.org/display/ClientLibs/Automated+client+GUI+testing+system+set+up+requirements)
## Editing this document
If you want to contribute changes to this document, edit `doc/testing.md` and

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -242,10 +242,7 @@ ifneq ($(filter product-bundles% legacy-bundles, $(MAKECMDGOALS)), )
)
JDK_SYMBOLS_BUNDLE_FILES := \
$(filter-out \
%.stripped.pdb, \
$(call FindFiles, $(SYMBOLS_IMAGE_DIR)) \
)
$(call FindFiles, $(SYMBOLS_IMAGE_DIR))
TEST_DEMOS_BUNDLE_FILES := $(filter $(JDK_DEMOS_IMAGE_HOMEDIR)/demo/%, \
$(ALL_JDK_DEMOS_FILES))
@@ -452,7 +449,7 @@ ifneq ($(filter jcov-bundles, $(MAKECMDGOALS)), )
BUNDLE_NAME := $(JCOV_BUNDLE_NAME), \
FILES := $(JCOV_BUNDLE_FILES), \
BASE_DIRS := $(JCOV_IMAGE_DIR), \
SUBDIR := jdk-$(VERSION_NUMBER), \
SUBDIR := $(JDK_BUNDLE_SUBDIR), \
))
JCOV_TARGETS += $(BUILD_JCOV_BUNDLE)

View File

@@ -171,41 +171,41 @@ $(BUILD_DEMO_CodePointIM_JAR): $(CODEPOINT_METAINF_SERVICE_FILE)
$(eval $(call SetupBuildDemo, FileChooserDemo, \
DEMO_SUBDIR := jfc, \
DISABLED_WARNINGS := rawtypes deprecation unchecked this-escape, \
DISABLED_WARNINGS := rawtypes deprecation unchecked, \
))
$(eval $(call SetupBuildDemo, SwingSet2, \
DEMO_SUBDIR := jfc, \
EXTRA_COPY_TO_JAR := .java, \
EXTRA_MANIFEST_ATTR := SplashScreen-Image: resources/images/splash.png, \
DISABLED_WARNINGS := rawtypes deprecation unchecked static serial cast this-escape, \
DISABLED_WARNINGS := rawtypes deprecation unchecked static serial cast, \
))
$(eval $(call SetupBuildDemo, Font2DTest, \
DISABLED_WARNINGS := rawtypes deprecation unchecked serial cast this-escape, \
DISABLED_WARNINGS := rawtypes deprecation unchecked serial cast, \
DEMO_SUBDIR := jfc, \
))
$(eval $(call SetupBuildDemo, J2Ddemo, \
DEMO_SUBDIR := jfc, \
MAIN_CLASS := java2d.J2Ddemo, \
DISABLED_WARNINGS := rawtypes deprecation unchecked cast lossy-conversions this-escape, \
DISABLED_WARNINGS := rawtypes deprecation unchecked cast lossy-conversions, \
JAR_NAME := J2Ddemo, \
))
$(eval $(call SetupBuildDemo, Metalworks, \
DISABLED_WARNINGS := rawtypes unchecked this-escape, \
DISABLED_WARNINGS := rawtypes unchecked, \
DEMO_SUBDIR := jfc, \
))
$(eval $(call SetupBuildDemo, Notepad, \
DISABLED_WARNINGS := rawtypes this-escape, \
DISABLED_WARNINGS := rawtypes, \
DEMO_SUBDIR := jfc, \
))
$(eval $(call SetupBuildDemo, Stylepad, \
DEMO_SUBDIR := jfc, \
DISABLED_WARNINGS := rawtypes unchecked this-escape, \
DISABLED_WARNINGS := rawtypes unchecked, \
EXTRA_SRC_DIR := $(DEMO_SHARE_SRC)/jfc/Notepad, \
EXCLUDE_FILES := $(DEMO_SHARE_SRC)/jfc/Notepad/README.txt, \
))
@@ -215,12 +215,11 @@ $(eval $(call SetupBuildDemo, SampleTree, \
))
$(eval $(call SetupBuildDemo, TableExample, \
DISABLED_WARNINGS := rawtypes unchecked deprecation this-escape, \
DISABLED_WARNINGS := rawtypes unchecked deprecation, \
DEMO_SUBDIR := jfc, \
))
$(eval $(call SetupBuildDemo, TransparentRuler, \
DISABLED_WARNINGS := this-escape, \
DEMO_SUBDIR := jfc, \
MAIN_CLASS := transparentruler.Ruler, \
))

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2014, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -98,7 +98,6 @@ define SetupInterimModule
EXCLUDES := sun javax/tools/snippet-files, \
EXCLUDE_FILES := $(TOPDIR)/src/$1/share/classes/module-info.java \
$(TOPDIR)/src/$1/share/classes/javax/tools/ToolProvider.java \
$(TOPDIR)/src/$1/share/classes/com/sun/tools/javac/launcher/Main.java \
Standard.java, \
EXTRA_FILES := $(BUILDTOOLS_OUTPUTDIR)/gensrc/$1.interim/module-info.java \
$($1.interim_EXTRA_FILES), \
@@ -110,9 +109,7 @@ define SetupInterimModule
$$(INTERIM_LANGTOOLS_ADD_EXPORTS) \
--patch-module java.base=$(BUILDTOOLS_OUTPUTDIR)/gensrc/java.base.interim \
--add-exports java.base/jdk.internal.javac=java.compiler.interim \
--add-exports java.base/jdk.internal.javac=jdk.compiler.interim \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.compiler.interim \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.javadoc.interim, \
--add-exports java.base/jdk.internal.javac=jdk.compiler.interim, \
))
$1_DEPS_INTERIM := $$(addsuffix .interim, $$(filter \
@@ -128,20 +125,5 @@ $(foreach m, $(INTERIM_LANGTOOLS_BASE_MODULES), \
)
################################################################################
# Setup the compilation of the javac server build tool. Technically, this is not
# really "interim" langtools, but just like it, it is needed henceforth for all
# java compilation using the interim compiler.
$(eval $(call SetupJavaCompilation, BUILD_JAVAC_SERVER, \
COMPILER := bootjdk, \
TARGET_RELEASE := $(TARGET_RELEASE_BOOTJDK), \
SRC := $(TOPDIR)/make/langtools/tools, \
INCLUDES := javacserver, \
BIN := $(BUILDTOOLS_OUTPUTDIR)/langtools_javacserver_classes, \
))
TARGETS += $(BUILD_JAVAC_SERVER)
################################################################################
all: $(TARGETS)

View File

@@ -53,7 +53,7 @@ $(eval $(call SetupJavaCompilation, BUILD_JIGSAW_TOOLS, \
build/tools/jigsaw, \
COPY := .properties .html, \
BIN := $(TOOLS_CLASSES_DIR), \
DISABLED_WARNINGS := fallthrough this-escape, \
DISABLED_WARNINGS := fallthrough, \
JAVAC_FLAGS := \
--add-modules jdk.jdeps \
--add-exports java.base/jdk.internal.module=ALL-UNNAMED \

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -57,9 +57,7 @@ $(eval $(call SetupJavaCompilation, BUILD_TOOLS_JDK, \
JAVAC_FLAGS := \
--add-exports java.desktop/sun.awt=ALL-UNNAMED \
--add-exports java.base/sun.text=ALL-UNNAMED \
--add-exports java.base/sun.security.util=ALL-UNNAMED \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.compiler.interim \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.javadoc.interim, \
--add-exports java.base/sun.security.util=ALL-UNNAMED, \
))
TARGETS += $(BUILD_TOOLS_JDK)
@@ -71,13 +69,6 @@ $(eval $(call SetupCopyFiles,COPY_NIMBUS_TEMPLATES, \
TARGETS += $(COPY_NIMBUS_TEMPLATES)
$(eval $(call SetupCopyFiles,COPY_CLDRCONVERTER_PROPERTIES, \
SRC := $(TOPDIR)/make/jdk/src/classes/build/tools/cldrconverter, \
DEST := $(BUILDTOOLS_OUTPUTDIR)/jdk_tools_classes/build/tools/cldrconverter, \
FILES := $(wildcard $(TOPDIR)/make/jdk/src/classes/build/tools/cldrconverter/*.properties)))
TARGETS += $(COPY_CLDRCONVERTER_PROPERTIES)
################################################################################
$(eval $(call SetupJavaCompilation, COMPILE_DEPEND, \
@@ -92,9 +83,7 @@ $(eval $(call SetupJavaCompilation, COMPILE_DEPEND, \
--add-exports jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED \
--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED \
--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED \
--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.compiler.interim \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.javadoc.interim, \
--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED, \
))
DEPEND_SERVICE_PROVIDER := $(BUILDTOOLS_OUTPUTDIR)/depend/META-INF/services/com.sun.source.util.Plugin


@@ -1,5 +1,5 @@
#
# Copyright (c) 2014, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -81,11 +81,13 @@ endif
ifneq ($(CMDS_DIR), )
DEPS += $(call FindFiles, $(CMDS_DIR))
ifeq ($(call isTargetOs, windows)+$(SHIP_DEBUG_SYMBOLS), true+public)
# For public debug symbols on Windows, we have to use stripped pdbs and rename them
# For public debug symbols on Windows, we have to use stripped pdbs, rename them
# and filter out a few launcher pdbs where there's a lib that goes by the same name
rename_stripped = $(patsubst %.stripped.pdb,%.pdb,$1)
CMDS_DIR_FILTERED := $(subst modules_cmds,modules_cmds_filtered, $(CMDS_DIR))
FILES_CMDS := $(filter-out %.pdb, $(call FindFiles, $(CMDS_DIR))) \
$(filter %.stripped.pdb, $(call FindFiles, $(CMDS_DIR)))
$(filter-out %jimage.stripped.pdb %jpackage.stripped.pdb %java.stripped.pdb, \
$(filter %.stripped.pdb, $(call FindFiles, $(CMDS_DIR))))
$(eval $(call SetupCopyFiles, COPY_FILTERED_CMDS, \
SRC := $(CMDS_DIR), \
DEST := $(CMDS_DIR_FILTERED), \
@@ -94,6 +96,18 @@ ifneq ($(CMDS_DIR), )
))
DEPS += $(COPY_FILTERED_CMDS)
JMOD_FLAGS += --cmds $(CMDS_DIR_FILTERED)
else ifeq ($(call isTargetOs, windows)+$(SHIP_DEBUG_SYMBOLS), true+full)
# For full debug symbols on Windows, we have to filter out a few launcher pdbs
# where there's a lib that goes by the same name
CMDS_DIR_FILTERED := $(subst modules_cmds,modules_cmds_filtered, $(CMDS_DIR))
$(eval $(call SetupCopyFiles, COPY_FILTERED_CMDS, \
SRC := $(CMDS_DIR), \
DEST := $(CMDS_DIR_FILTERED), \
FILES := $(filter-out %jimage.pdb %jpackage.pdb %java.pdb, \
$(call FindFiles, $(CMDS_DIR))), \
))
DEPS += $(COPY_FILTERED_CMDS)
JMOD_FLAGS += --cmds $(CMDS_DIR_FILTERED)
else
JMOD_FLAGS += --cmds $(CMDS_DIR)
endif
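
As a quick illustration, here is a tiny standalone sketch (not part of the build) of the GNU make idioms used in the hunk above: filter/filter-out pick which pdb files are shipped and patsubst performs the .stripped.pdb to .pdb rename. All file names below are invented for the example.

    # pdb-demo.mk -- run with: make -f pdb-demo.mk
    CMDS := bin/java.exe bin/jar.exe bin/jar.stripped.pdb bin/java.stripped.pdb bin/jimage.stripped.pdb
    # Keep non-pdb files plus stripped pdbs, minus the launchers (jimage, jpackage,
    # java) whose names clash with a library of the same name.
    FILES := $(strip $(filter-out %.pdb, $(CMDS)) \
        $(filter-out %jimage.stripped.pdb %jpackage.stripped.pdb %java.stripped.pdb, \
            $(filter %.stripped.pdb, $(CMDS))))
    # Same rename helper as in the hunk above.
    rename_stripped = $(patsubst %.stripped.pdb,%.pdb,$1)
    # Prints: bin/java.exe bin/jar.exe bin/jar.stripped.pdb
    $(info $(FILES))
    # Prints: bin/java.exe bin/jar.exe bin/jar.pdb
    $(info $(call rename_stripped,$(FILES)))
    all: ;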


@@ -1,4 +1,4 @@
# Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -80,7 +80,6 @@ JAVADOC_TAGS := \
-taglet build.tools.taglet.JSpec\$$JLS \
-taglet build.tools.taglet.JSpec\$$JVMS \
-taglet build.tools.taglet.ModuleGraph \
-taglet build.tools.taglet.SealedGraph \
-taglet build.tools.taglet.ToolGuide \
-tag since \
-tag serialData \
@@ -102,23 +101,17 @@ REFERENCE_TAGS := $(JAVADOC_TAGS)
JAVADOC_DISABLED_DOCLINT_WARNINGS := missing
JAVADOC_DISABLED_DOCLINT_PACKAGES := org.w3c.* javax.smartcardio
# Allow overriding on the command line
# (intentionally sharing name with the javac option)
JAVA_WARNINGS_ARE_ERRORS ?= -Werror
# The initial set of options for javadoc
JAVADOC_OPTIONS := -use -keywords -notimestamp \
-encoding ISO-8859-1 -docencoding UTF-8 -breakiterator \
-splitIndex --system none -javafx --expand-requires transitive \
--override-methods=summary \
--no-external-specs-page
--override-methods=summary
# The reference options must stay stable to allow for comparisons across the
# development cycle.
REFERENCE_OPTIONS := -XDignore.symbol.file=true -use -keywords -notimestamp \
-encoding ISO-8859-1 -breakiterator -splitIndex --system none \
-html5 -javafx --expand-requires transitive \
--no-external-specs-page
-html5 -javafx --expand-requires transitive
# Should we add DRAFT stamps to the generated javadoc?
ifeq ($(VERSION_IS_GA), true)
@@ -194,55 +187,25 @@ JAVASE_LONG_NAME := Java<sup>&reg;</sup> Platform, Standard Edition
# Functions
# Helper function for creating a svg file from a dot file generated by the
# GenGraphs tool for a module.
# GenGraphs tool.
# param 1: SetupJavadocGeneration namespace ($1)
# param 2: module name
#
define setup_module_graph_dot_to_svg
$1_$2_DOT_SRC := $$($1_MODULE_GRAPHS_DIR)/$2.dot
define setup_gengraph_dot_to_svg
$1_$2_DOT_SRC := $$($1_GENGRAPHS_DIR)/$2.dot
$1_$2_SVG_TARGET := $$($1_TARGET_DIR)/$2/module-graph.svg
# For each module needing a graph, create a svg file from the dot file
# generated by the GenGraphs tool and store it in the target dir.
$$(eval $$(call SetupExecute, module_graphs_svg_$1_$2, \
$$(eval $$(call SetupExecute, gengraphs_svg_$1_$2, \
INFO := Running dot for module graphs for $2, \
DEPS := $$(module_graphs_dot_$1_TARGET), \
DEPS := $$(gengraphs_$1_TARGET), \
OUTPUT_FILE := $$($1_$2_SVG_TARGET), \
SUPPORT_DIR := $$($1_MODULE_GRAPHS_DIR), \
SUPPORT_DIR := $$($1_GENGRAPHS_DIR), \
COMMAND := $$(DOT) -Tsvg -o $$($1_$2_SVG_TARGET) $$($1_$2_DOT_SRC), \
))
$1_GRAPHS_TARGETS += $$($1_$2_SVG_TARGET)
endef
# Helper function for creating a svg file for a class for which the SealedGraph
# taglet has generated a dot file. The dot file has a special name which
# encodes the module and class the graph belongs to.
#
# param 1: SetupJavadocGeneration namespace ($1)
# param 2: dot file name
#
define setup_sealed_graph_dot_to_svg
$1_$2_DOT_SRC := $$($1_SEALED_GRAPHS_DIR)/$2.dot
$1_$2_TARGET_CLASS := $$(word 2, $$(subst _, , $2))
$1_$2_SLASHED_NAME := $$(subst .,/, $$($1_$2_TARGET_CLASS))
$1_$2_TARGET_MODULE := $$(word 1, $$(subst _, , $2))
$1_$2_TARGET_PATH := $$($1_TARGET_DIR)/$$($1_$2_TARGET_MODULE)/$$(dir $$($1_$2_SLASHED_NAME))
$1_$2_TARGET_NAME := $$(notdir $$($1_$2_SLASHED_NAME))
$1_$2_SVG_TARGET := $$($1_$2_TARGET_PATH)/$$($1_$2_TARGET_NAME)-sealed-graph.svg
$$(call MakeDir, $$($1_$2_TARGET_PATH))
# For each class needing a graph, create a svg file from the dot file
# generated by the SealedGraph taglet and store it in the target dir.
$$(eval $$(call SetupExecute, sealed_graphs_svg_$1_$2, \
INFO := Running dot for sealed graphs for $$($1_$2_TARGET_MODULE)/$$($1_$2_TARGET_CLASS), \
DEPS := $$($1_$2_DOT_SRC), \
OUTPUT_FILE := $$($1_$2_SVG_TARGET), \
SUPPORT_DIR := $$($1_SEALED_GRAPHS_DIR), \
COMMAND := $$(DOT) -Tsvg -o $$($1_$2_SVG_TARGET) $$($1_$2_DOT_SRC), \
))
$1_GRAPHS_TARGETS += $$($1_$2_SVG_TARGET)
$1_MODULEGRAPH_TARGETS += $$($1_$2_SVG_TARGET)
endef
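
To make the name decoding above easier to follow, here is a hypothetical standalone sketch (single $ instead of the $$ used inside the define; the class name is invented): a dot file stem of the form <module>_<class> is split into its module and class parts, and the dots in the class name become path separators.

    # sealed-name-demo.mk -- run with: make -f sealed-name-demo.mk
    name    := java.base_java.lang.Integer
    module  := $(word 1, $(subst _, , $(name)))
    class   := $(word 2, $(subst _, , $(name)))
    slashed := $(subst .,/,$(class))
    # Prints: java.base/java/lang/Integer-sealed-graph.svg
    $(info $(module)/$(slashed)-sealed-graph.svg)
    all: ;
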
# Helper function to create the overview.html file to use with the -overview
@@ -290,7 +253,7 @@ endef
#
# Parameter 1 is the name of the rule. This name is used as variable prefix.
# Targets generated are returned as $1_JAVADOC_TARGETS and
# $1_GRAPHS_TARGETS. Note that the index.html file will work as a "touch
# $1_MODULEGRAPH_TARGETS. Note that the index.html file will work as a "touch
# file" for all the magnitude of files that are generated by javadoc.
#
# Remaining parameters are named arguments. These include:
@@ -313,12 +276,9 @@ define SetupApiDocsGenerationBody
-Djspec.version=$$(VERSION_SPECIFICATION)
ifeq ($$(ENABLE_FULL_DOCS), true)
$1_SEALED_GRAPHS_DIR := $$(SUPPORT_OUTPUTDIR)/docs/$1-sealed-graphs
# Tell the ModuleGraph and SealedGraph taglets to generate html links to
# soon-to-be-created svg files with module/sealed graphs.
$1_JAVA_ARGS += -DenableModuleGraph=true -DsealedDotOutputDir=$$($1_SEALED_GRAPHS_DIR)
$$(call MakeDir, $$($1_SEALED_GRAPHS_DIR))
# Tell the ModuleGraph taglet to generate html links to soon-to-be-created
# svg files with module graphs.
$1_JAVA_ARGS += -DenableModuleGraph=true
endif
# Start with basic options and tags
@@ -339,7 +299,6 @@ define SetupApiDocsGenerationBody
# Ignore the doclint warnings in certain packages
$1_OPTIONS += -Xdoclint/package:$$(call CommaList, $$(addprefix -, \
$$(JAVADOC_DISABLED_DOCLINT_PACKAGES)))
$1_OPTIONS += $$(JAVA_WARNINGS_ARE_ERRORS)
$1_DOC_TITLE := $$($1_LONG_NAME)<br>Version $$(VERSION_SPECIFICATION) API \
Specification
@@ -425,46 +384,30 @@ define SetupApiDocsGenerationBody
# First we run the GenGraph tool. It will query the module structure of the
# running JVM and output .dot files for all existing modules.
MODULE_GRAPHS_PROPS := \
GENGRAPHS_PROPS := \
$$(TOPDIR)/make/jdk/src/classes/build/tools/jigsaw/javadoc-graphs.properties
$1_MODULE_GRAPHS_DIR := $$(SUPPORT_OUTPUTDIR)/docs/$1-module-graphs
$1_GENGRAPHS_DIR := $$(SUPPORT_OUTPUTDIR)/docs/$1-gengraphs
$$(eval $$(call SetupExecute, module_graphs_dot_$1, \
INFO := Generating module graphs for $1 documentation, \
DEPS := $$(BUILD_JIGSAW_TOOLS) $$(MODULE_GRAPHS_PROPS), \
OUTPUT_DIR := $$($1_MODULE_GRAPHS_DIR), \
COMMAND := $$(TOOL_GENGRAPHS) --spec --output $$($1_MODULE_GRAPHS_DIR) \
--dot-attributes $$(MODULE_GRAPHS_PROPS), \
$$(eval $$(call SetupExecute, gengraphs_$1, \
INFO := Running gengraphs for $1 documentation, \
DEPS := $$(BUILD_JIGSAW_TOOLS) $$(GENGRAPHS_PROPS), \
OUTPUT_DIR := $$($1_GENGRAPHS_DIR), \
COMMAND := $$(TOOL_GENGRAPHS) --spec --output $$($1_GENGRAPHS_DIR) \
--dot-attributes $$(GENGRAPHS_PROPS), \
))
# For each module needing a graph, create a svg file from the dot file
# generated by the GenGraphs tool and store it in the target dir.
# They will depend on module_graphs_dot_$1_TARGET, and will be added to
# $1_GRAPHS_TARGETS.
# They will depend on gengraphs_$1_TARGET, and will be added to $1.
$$(foreach m, $$($1_MODULES_NEEDING_GRAPH), \
$$(eval $$(call setup_module_graph_dot_to_svg,$1,$$m)) \
)
# We have asked SealedGraph to generate dot files and links to svg files.
# Now we must produce the svg files from the dot files.
# Get a list of classes for which SealedGraph has generated dot files
$1_SEALED_CLASSES := $$(patsubst %.dot,%,$$(patsubst \
$$($1_SEALED_GRAPHS_DIR)/%,%, \
$$(wildcard $$($1_SEALED_GRAPHS_DIR)/*.dot)))
# For each class needing a graph, create a svg file from the dot file
# generated by the SealedGraph taglet and store it in the target dir.
# They will will be added to $1_GRAPHS_TARGETS.
$$(foreach c, $$($1_SEALED_CLASSES), \
$$(eval $$(call setup_sealed_graph_dot_to_svg,$1,$$c)) \
$$(eval $$(call setup_gengraph_dot_to_svg,$1,$$m)) \
)
endif
endef
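
The foreach/eval/call pattern above, where one rule is instantiated per module or per class, is the same per-item rule generation idiom used throughout these makefiles. A minimal, hypothetical sketch of the idiom with invented module names and a plain echo standing in for the dot command:

    # foreach-eval-demo.mk -- run with: make -f foreach-eval-demo.mk
    MODULES := java.base java.compiler
    all: $(addsuffix .svg, $(MODULES))
    # $1 is the module name; the recipe is inlined after ';' so no tab is needed.
    define mod_rule
    $1.svg: ; @echo rendering $$@
    endef
    $(foreach m, $(MODULES), $(eval $(call mod_rule,$m)))
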
################################################################################
# Setup generation of the JDK API documentation (javadoc + graphs)
# Setup generation of the JDK API documentation (javadoc + modulegraph)
# Define the groups of the JDK API documentation
JavaSE_GROUP_NAME := Java SE
@@ -513,10 +456,10 @@ $(eval $(call SetupApiDocsGeneration, JDK_API, \
))
# Targets generated are returned in JDK_API_JAVADOC_TARGETS and
# JDK_API_GRAPHS_TARGETS.
# JDK_API_MODULEGRAPH_TARGETS.
################################################################################
# Setup generation of the Java SE API documentation (javadoc + graphs)
# Setup generation of the Java SE API documentation (javadoc + modulegraph)
# The Java SE module scope is just java.se and its transitive indirect
# exports.
@@ -530,10 +473,10 @@ $(eval $(call SetupApiDocsGeneration, JAVASE_API, \
))
# Targets generated are returned in JAVASE_API_JAVADOC_TARGETS and
# JAVASE_API_GRAPHS_TARGETS.
# JAVASE_API_MODULEGRAPH_TARGETS.
################################################################################
# Setup generation of the reference Java SE API documentation (javadoc + graphs)
# Setup generation of the reference Java SE API documentation (javadoc + modulegraph)
# The reference javadoc is just the same as javase, but using the BootJDK javadoc
# and a stable set of javadoc options. Typically it is used for generating
@@ -551,7 +494,7 @@ $(eval $(call SetupApiDocsGeneration, REFERENCE_API, \
))
# Targets generated are returned in REFERENCE_API_JAVADOC_TARGETS and
# REFERENCE_API_GRAPHS_TARGETS.
# REFERENCE_API_MODULEGRAPH_TARGETS.
################################################################################
@@ -621,7 +564,7 @@ $(foreach n, 0 1 2, \
$(eval specs_bottom_rel_path := $(specs_bottom_rel_path)../) \
)
SPECS_TOP := $(if $(filter true, $(IS_DRAFT)), <header class="draft-header" role="banner">$(DRAFT_TEXT)</header>)
SPECS_TOP := $(if $(filter true, $(IS_DRAFT)), <header class="draft-header">$(DRAFT_TEXT)</header>)
# For all html files in $module/share/specs directories, copy and add the
# copyright footer.
@@ -650,9 +593,6 @@ ifeq ($(ENABLE_PANDOC), true)
# html, if we have pandoc (otherwise we'll just skip this).
GLOBAL_SPECS_DEFAULT_CSS_FILE := $(DOCS_OUTPUTDIR)/resources/jdk-default.css
# Unset the following to suppress the link to the tool guides
NAV_LINK_GUIDES := --nav-link-guides
HEADER_RIGHT_SIDE_INFO := <strong>$(subst &amp;,&,$(JDK_SHORT_NAME))$(DRAFT_MARKER_STR)</strong>
$(foreach m, $(ALL_MODULES), \
$(eval SPECS_$m := $(call FindModuleSpecsDirs, $m)) \
@@ -669,8 +609,7 @@ ifeq ($(ENABLE_PANDOC), true)
REPLACEMENTS := \
@@VERSION_SPECIFICATION@@ => $(VERSION_SPECIFICATION) ; \
@@VERSION_STRING@@ => $(VERSION_STRING), \
POST_PROCESS := $(TOOL_FIXUPPANDOC) --insert-nav --nav-right-info '$(HEADER_RIGHT_SIDE_INFO)' \
--nav-subdirs $($m_$f_NOF_SUBDIRS) $(NAV_LINK_GUIDES), \
POST_PROCESS := $(TOOL_FIXUPPANDOC), \
)) \
$(eval JDK_SPECS_TARGETS += $($($m_$f_NAME))) \
) \
@@ -704,8 +643,7 @@ ifeq ($(ENABLE_PANDOC), true)
@@VERSION_SHORT@@ => $(VERSION_SHORT) ; \
@@VERSION_SPECIFICATION@@ => $(VERSION_SPECIFICATION), \
OPTIONS := --toc -V include-before='$(SPECS_TOP)' -V include-after='$(SPECS_BOTTOM_1)', \
POST_PROCESS := $(TOOL_FIXUPPANDOC) --insert-nav --nav-right-info '$(HEADER_RIGHT_SIDE_INFO)' \
--nav-subdirs 1 --nav-link-guides, \
POST_PROCESS := $(TOOL_FIXUPPANDOC), \
EXTRA_DEPS := $(PANDOC_HTML_MANPAGE_FILTER) \
$(PANDOC_HTML_MANPAGE_FILTER_SOURCE), \
)) \
@@ -720,25 +658,13 @@ endif
# Special treatment for generated documentation
SPEC_HEADER_BLOCK := \
<header id="title-block-header"> \
<div class="navbar"> \
<div>$(HEADER_RIGHT_SIDE_INFO)</div> \
<nav><ul><li><a href="PATH_TO_SPECS/../api/index.html">API</a> \
<li><a href="PATH_TO_SPECS/index.html">OTHER SPECIFICATIONS \
<li><a href="PATH_TO_SPECS/man/index.html">TOOL GUIDES</a></ul></nav> \
</div> \
</header>
JDWP_PROTOCOL := $(SUPPORT_OUTPUTDIR)/gensrc/jdk.jdi/jdwp-protocol.html
ifneq ($(call ApplySpecFilter, $(JDWP_PROTOCOL)), )
JDWP_HEADER_BLOCK := $(subst PATH_TO_SPECS,..,$(SPEC_HEADER_BLOCK))
$(eval $(call SetupTextFileProcessing, PROCESS_JDWP_PROTOCOL, \
SOURCE_FILES := $(JDWP_PROTOCOL), \
OUTPUT_DIR := $(DOCS_OUTPUTDIR)/specs/jdwp, \
REPLACEMENTS := \
<style> => <link rel="stylesheet" href="../../resources/jdk-default.css"/><style> ; \
<body> => <body>$(SPECS_TOP)$(JDWP_HEADER_BLOCK) ; \
<body> => <body>$(SPECS_TOP) ; \
</body> => $(SPECS_BOTTOM_1)</body>, \
))
JDK_SPECS_TARGETS += $(PROCESS_JDWP_PROTOCOL)
@@ -747,13 +673,11 @@ endif
# Get jvmti.html from the main jvm variant (all variants' jvmti.html are identical).
JVMTI_HTML ?= $(HOTSPOT_OUTPUTDIR)/variant-$(JVM_VARIANT_MAIN)/gensrc/jvmtifiles/jvmti.html
ifneq ($(call ApplySpecFilter, $(JVMTI_HTML)), )
JVMTI_HEADER_BLOCK := $(subst PATH_TO_SPECS,.,$(SPEC_HEADER_BLOCK))
$(eval $(call SetupTextFileProcessing, PROCESS_JVMTI_HTML, \
SOURCE_FILES := $(JVMTI_HTML), \
OUTPUT_DIR := $(DOCS_OUTPUTDIR)/specs/, \
REPLACEMENTS := \
<style> => <link rel="stylesheet" href="../resources/jdk-default.css"/><style> ; \
<body> => <body>$(SPECS_TOP)$(JVMTI_HEADER_BLOCK) ; \
<body> => <body>$(SPECS_TOP) ; \
</body> => $(SPECS_BOTTOM_0)</body>, \
))
JDK_SPECS_TARGETS += $(PROCESS_JVMTI_HTML)
@@ -768,7 +692,7 @@ JAVADOC_ZIP_FILE := $(OUTPUTDIR)/bundles/$(JAVADOC_ZIP_NAME)
$(eval $(call SetupZipArchive, BUILD_JAVADOC_ZIP, \
SRC := $(DOCS_OUTPUTDIR), \
ZIP := $(JAVADOC_ZIP_FILE), \
EXTRA_DEPS := $(JDK_API_JAVADOC_TARGETS) $(JDK_API_GRAPHS_TARGETS) \
EXTRA_DEPS := $(JDK_API_JAVADOC_TARGETS) $(JDK_API_MODULEGRAPH_TARGETS) \
$(JDK_SPECS_TARGETS), \
))
@@ -796,15 +720,15 @@ SPECS_ZIP_TARGETS += $(BUILD_SPECS_ZIP)
docs-jdk-api-javadoc: $(JDK_API_JAVADOC_TARGETS) $(JDK_API_CUSTOM_TARGETS)
docs-jdk-api-graphs: $(JDK_API_GRAPHS_TARGETS)
docs-jdk-api-modulegraph: $(JDK_API_MODULEGRAPH_TARGETS)
docs-javase-api-javadoc: $(JAVASE_API_JAVADOC_TARGETS) $(JAVASE_API_CUSTOM_TARGETS)
docs-javase-api-graphs: $(JAVASE_API_GRAPHS_TARGETS)
docs-javase-api-modulegraph: $(JAVASE_API_MODULEGRAPH_TARGETS)
docs-reference-api-javadoc: $(REFERENCE_API_JAVADOC_TARGETS) $(REFERENCE_API_CUSTOM_TARGETS)
docs-reference-api-graphs: $(REFERENCE_API_GRAPHS_TARGETS)
docs-reference-api-modulegraph: $(REFERENCE_API_MODULEGRAPH_TARGETS)
docs-jdk-specs: $(JDK_SPECS_TARGETS)
@@ -814,12 +738,12 @@ docs-zip: $(ZIP_TARGETS)
docs-specs-zip: $(SPECS_ZIP_TARGETS)
all: docs-jdk-api-javadoc docs-jdk-api-graphs docs-javase-api-javadoc \
docs-javase-api-graphs docs-reference-api-javadoc \
docs-reference-api-graphs docs-jdk-specs docs-jdk-index docs-zip \
all: docs-jdk-api-javadoc docs-jdk-api-modulegraph docs-javase-api-javadoc \
docs-javase-api-modulegraph docs-reference-api-javadoc \
docs-reference-api-modulegraph docs-jdk-specs docs-jdk-index docs-zip \
docs-specs-zip
.PHONY: default all docs-jdk-api-javadoc docs-jdk-api-graphs \
docs-javase-api-javadoc docs-javase-api-graphs \
docs-reference-api-javadoc docs-reference-api-graphs docs-jdk-specs \
.PHONY: default all docs-jdk-api-javadoc docs-jdk-api-modulegraph \
docs-javase-api-javadoc docs-javase-api-modulegraph \
docs-reference-api-javadoc docs-reference-api-modulegraph docs-jdk-specs \
docs-jdk-index docs-zip docs-specs-zip


@@ -1,5 +1,5 @@
#
# Copyright (c) 2012, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2012, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -52,6 +52,7 @@ help:
$(info $(_) make docs # Create all docs)
$(info $(_) make docs-jdk-api # Create just JDK javadocs)
$(info $(_) make bootcycle-images # Build images twice, second time with newly built JDK)
$(info $(_) make install # Install the generated images locally)
$(info $(_) make check # Run basic testing (currently tier1))
$(info $(_) make test-<test> # Run test, e.g. test-tier1)
$(info $(_) make test TEST=<t> # Run test(s) given by TEST specification)
@@ -124,11 +125,6 @@ test-prebuilt:
$(MAKE) --no-print-directory -r -R -I make/common/ -f make/RunTestsPrebuilt.gmk \
test-prebuilt CUSTOM_MAKE_DIR=$(CUSTOM_MAKE_DIR) TEST="$(TEST)" )
test-prebuilt-with-exit-code:
@( cd $(topdir) && \
$(MAKE) --no-print-directory -r -R -I make/common/ -f make/RunTestsPrebuilt.gmk \
test-prebuilt-with-exit-code CUSTOM_MAKE_DIR=$(CUSTOM_MAKE_DIR) TEST="$(TEST)" )
# Alias for backwards compatibility
run-test-prebuilt: test-prebuilt


@@ -267,6 +267,9 @@ else
endif
endif
FILTERED_PDBS := %jimage.stripped.pdb %jpackage.stripped.pdb %java.stripped.pdb \
%jimage.pdb %jpackage.pdb %java.pdb %jimage.map %jpackage.map %java.map
# Param 1 - either JDK or JRE
SetupCopyDebuginfo = \
$(foreach m, $(ALL_$1_MODULES), \
@@ -280,8 +283,8 @@ SetupCopyDebuginfo = \
$(eval $(call SetupCopyFiles, COPY_$1_CMDS_DEBUGINFO_$m, \
SRC := $(SUPPORT_OUTPUTDIR)/modules_cmds/$m, \
DEST := $($1_IMAGE_DIR)/$(CMDS_TARGET_SUBDIR), \
FILES := $(call FindDebuginfoFiles, \
$(SUPPORT_OUTPUTDIR)/modules_cmds/$m), \
FILES := $(filter-out $(FILTERED_PDBS), $(call FindDebuginfoFiles, \
$(SUPPORT_OUTPUTDIR)/modules_cmds/$m)), \
)) \
$(eval $1_TARGETS += $$(COPY_$1_CMDS_DEBUGINFO_$m)) \
)


@@ -1,5 +1,5 @@
#
# Copyright (c) 2012, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2012, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -138,10 +138,7 @@ ifeq ($(HAS_SPEC),)
# The spec files depend on the autoconf source code. This check makes sure
# the configuration is up to date after changes to configure.
$(SPECS): $(wildcard $(topdir)/make/autoconf/*) \
$(if $(CUSTOM_CONFIG_DIR), $(wildcard $(CUSTOM_CONFIG_DIR)/*)) \
$(addprefix $(topdir)/make/conf/, version-numbers.conf branding.conf) \
$(if $(CUSTOM_CONF_DIR), $(wildcard $(addprefix $(CUSTOM_CONF_DIR)/, \
version-numbers.conf branding.conf)))
$(if $(CUSTOM_CONFIG_DIR), $(wildcard $(CUSTOM_CONFIG_DIR)/*))
ifeq ($(CONF_CHECK), fail)
@echo Error: The configuration is not up to date for \
"'$(lastword $(subst /, , $(dir $@)))'."
@@ -280,20 +277,15 @@ else # HAS_SPEC=true
$(ECHO) $(CONFIGURE_COMMAND_LINE)
reconfigure:
ifneq ($(REAL_CONFIGURE_COMMAND_EXEC_FULL), )
$(ECHO) "Re-running configure using original command line '$(REAL_CONFIGURE_COMMAND_EXEC_SHORT) $(REAL_CONFIGURE_COMMAND_LINE)'"
$(eval RECONFIGURE_COMMAND := $(REAL_CONFIGURE_COMMAND_EXEC_FULL) $(REAL_CONFIGURE_COMMAND_LINE))
else ifneq ($(CONFIGURE_COMMAND_LINE), )
ifneq ($(CONFIGURE_COMMAND_LINE), )
$(ECHO) "Re-running configure using arguments '$(CONFIGURE_COMMAND_LINE)'"
$(eval RECONFIGURE_COMMAND := $(BASH) $(TOPDIR)/configure $(CONFIGURE_COMMAND_LINE))
else
$(ECHO) "Re-running configure using default settings"
$(eval RECONFIGURE_COMMAND := $(BASH) $(TOPDIR)/configure)
endif
( cd $(CONFIGURE_START_DIR) && PATH="$(ORIGINAL_PATH)" AUTOCONF="$(AUTOCONF)" \
CUSTOM_ROOT="$(CUSTOM_ROOT)" \
CUSTOM_CONFIG_DIR="$(CUSTOM_CONFIG_DIR)" \
$(RECONFIGURE_COMMAND) )
$(BASH) $(TOPDIR)/configure $(CONFIGURE_COMMAND_LINE) )
##############################################################################
# The main target, for delegating into Main.gmk
@@ -327,7 +319,7 @@ else # HAS_SPEC=true
ifneq ($(PARALLEL_TARGETS), )
$(call PrepareFailureLogs)
$(call StartGlobalTimer)
$(call PrepareJavacServer)
$(call PrepareSmartJavac)
# JOBS will only be empty for a bootcycle-images recursive call
# or if specified via a make argument directly. In those cases
# treat it as NOT using jobs at all.
@@ -342,7 +334,7 @@ else # HAS_SPEC=true
cd $(TOPDIR) && $(MAKE) $(MAKE_ARGS) -j 1 -f make/Init.gmk \
HAS_SPEC=true on-failure ; \
exit $$exitcode ) )
$(call CleanupJavacServer)
$(call CleanupSmartJavac)
$(call StopGlobalTimer)
$(call ReportBuildTimes)
endif
@@ -354,7 +346,7 @@ else # HAS_SPEC=true
endif
on-failure:
$(call CleanupJavacServer)
$(call CleanupSmartJavac)
$(call StopGlobalTimer)
$(call ReportBuildTimes)
$(call PrintFailureReports)
@@ -367,11 +359,11 @@ else # HAS_SPEC=true
# Support targets for COMPARE_BUILD, used for makefile development
pre-compare-build:
$(call WaitForJavacServerFinish)
$(call WaitForSmartJavacFinish)
$(call PrepareCompareBuild)
post-compare-build:
$(call WaitForJavacServerFinish)
$(call WaitForSmartJavacFinish)
$(call CleanupCompareBuild)
$(call CompareBuildDoComparison)


@@ -204,15 +204,6 @@ ifeq ($(HAS_SPEC),)
# Otherwise select those that contain the given CONF string
matching_confs := $$(strip $$(foreach var, $$(all_confs), \
$$(if $$(findstring $$(CONF), $$(var)), $$(var))))
ifneq ($$(filter $$(CONF), $$(matching_confs)), )
# If we found an exact match, use that
matching_confs := $$(CONF)
# Don't repeat this output on make restarts caused by including
# generated files.
ifeq ($$(MAKE_RESTARTS),)
$$(info Using exact match for CONF=$$(CONF) (other matches are possible))
endif
endif
endif
ifeq ($$(matching_confs),)
$$(info Error: No configurations found matching CONF=$$(CONF).)
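
For context, a small hypothetical illustration of the substring matching in the context lines above (configuration names are invented; single $ is used since this runs outside a define): findstring keeps every configuration whose name contains the CONF value.

    # conf-demo.mk -- run with: make -f conf-demo.mk
    all_confs := linux-x86_64-server-release linux-x86_64-server-fastdebug macosx-aarch64-server-release
    CONF := release
    matching_confs := $(strip $(foreach var, $(all_confs), \
        $(if $(findstring $(CONF), $(var)), $(var))))
    # Prints: linux-x86_64-server-release macosx-aarch64-server-release
    $(info $(matching_confs))
    all: ;
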
@@ -435,10 +426,10 @@ else # $(HAS_SPEC)=true
# Compare first and second build. Ignore any error code from compare.sh.
$(ECHO) "Comparing between comparison rebuild (this/new) and baseline (other/old)"
$(if $(COMPARE_BUILD_COMP_DIR), \
+(cd $(COMPARE_BUILD_OUTPUTDIR) && ./compare.sh -vv $(COMPARE_BUILD_COMP_OPTS) \
+(cd $(COMPARE_BUILD_OUTPUTDIR) && ./compare.sh $(COMPARE_BUILD_COMP_OPTS) \
-2dirs $(COMPARE_BUILD_OUTPUTDIR)/$(COMPARE_BUILD_COMP_DIR) \
$(OUTPUTDIR)/$(COMPARE_BUILD_COMP_DIR) $(COMPARE_BUILD_IGNORE_RESULT)), \
+(cd $(COMPARE_BUILD_OUTPUTDIR) && ./compare.sh -vv $(COMPARE_BUILD_COMP_OPTS) \
+(cd $(COMPARE_BUILD_OUTPUTDIR) && ./compare.sh $(COMPARE_BUILD_COMP_OPTS) \
-o $(OUTPUTDIR) $(COMPARE_BUILD_IGNORE_RESULT)) \
)
endef
@@ -502,15 +493,15 @@ else # $(HAS_SPEC)=true
# Remove any javac server logs and port files. This
# prevents a new make run to reuse the previous servers.
define PrepareJavacServer
define PrepareSmartJavac
$(if $(JAVAC_SERVER_DIR), \
$(RM) -r $(JAVAC_SERVER_DIR) 2> /dev/null && \
$(MKDIR) -p $(JAVAC_SERVER_DIR) \
)
endef
define CleanupJavacServer
[ -f $(JAVAC_SERVER_DIR)/server.port ] && $(ECHO) Stopping javac server && \
define CleanupSmartJavac
[ -f $(JAVAC_SERVER_DIR)/server.port ] && $(ECHO) Stopping sjavac server && \
$(TOUCH) $(JAVAC_SERVER_DIR)/server.port.stop; true
endef
@@ -519,13 +510,13 @@ else # $(HAS_SPEC)=true
# move or remove the build output directory. Since we have no proper
# synchronization process, wait for a while and hope it helps. This is only
# used by build comparisons.
define WaitForJavacServerFinish
define WaitForSmartJavacFinish
$(if $(JAVAC_SERVER_DIR), \
sleep 5\
)
endef
else
define WaitForJavacServerFinish
define WaitForSmartJavacFinish
endef
endif

make/Install.gmk (new file, 43 lines added)

@@ -0,0 +1,43 @@
#
# Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
default: install
include $(SPEC)
BINARIES := $(notdir $(wildcard $(JDK_IMAGE_DIR)/bin/*))
INSTALLDIR := openjdk-$(VERSION_SHORT)
# Install the jdk image, in a very crude way. Not taking into
# account, how to install properly on macosx or windows etc.
install:
echo Installing jdk image into $(INSTALL_PREFIX)/jvm/$(INSTALLDIR)
echo and creating $(words $(BINARIES)) links from $(INSTALL_PREFIX)/bin into the jdk.
$(MKDIR) -p $(INSTALL_PREFIX)/jvm/$(INSTALLDIR)
$(RM) -r $(INSTALL_PREFIX)/jvm/$(INSTALLDIR)/*
$(CP) -rp $(JDK_IMAGE_DIR)/* $(INSTALL_PREFIX)/jvm/$(INSTALLDIR)
$(MKDIR) -p $(INSTALL_PREFIX)/bin
$(RM) $(addprefix $(INSTALL_PREFIX)/bin/, $(BINARIES))
$(foreach b, $(BINARIES), $(LN) -s $(INSTALL_PREFIX)/jvm/$(INSTALLDIR)/bin/$b $(INSTALL_PREFIX)/bin/$b &&) true
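
A short note on the $(foreach ...) &&) true idiom in the last recipe above: each iteration emits one shell command followed by &&, and the trailing true gives the final && something to run, so the whole loop becomes a single shell command line that stops at the first failure. A hypothetical standalone sketch with invented paths; it only prints the command line it would run.

    # links-demo.mk -- run with: make -f links-demo.mk
    BINARIES := java javac jar
    cmd := $(foreach b, $(BINARIES), ln -s /opt/jdk/bin/$b /usr/local/bin/$b &&) true
    # Prints one shell line:
    #   ln -s /opt/jdk/bin/java /usr/local/bin/java && ... && ln -s /opt/jdk/bin/jar /usr/local/bin/jar && true
    $(info $(cmd))
    all: ;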


@@ -1,5 +1,5 @@
#
# Copyright (c) 2014, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -46,10 +46,8 @@ JIMAGE_PKGS := \
jdk/internal/jrtfs \
#
# Compile jrt-fs.jar with the interim compiler, as it
# ends up in the image, this will ensure reproducible classes
$(eval $(call SetupJavaCompilation, BUILD_JRTFS, \
COMPILER := interim, \
COMPILER := bootjdk, \
DISABLED_WARNINGS := options, \
TARGET_RELEASE := $(TARGET_RELEASE_JDK8), \
SRC := $(TOPDIR)/src/java.base/share/classes, \


@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -257,46 +257,6 @@ $(eval $(call SetupTarget, hotspot-ide-project, \
ARGS := -I$(TOPDIR)/make/hotspot, \
))
$(eval $(call SetupTarget, eclipse-java-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make WORKSPACE=java SHARED=false, \
))
$(eval $(call SetupTarget, eclipse-hotspot-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make WORKSPACE=hotspot SHARED=false, \
))
$(eval $(call SetupTarget, eclipse-native-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make WORKSPACE=native SHARED=false, \
))
$(eval $(call SetupTarget, eclipse-mixed-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make SHARED=false, \
))
$(eval $(call SetupTarget, eclipse-shared-java-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make WORKSPACE=java SHARED=true, \
))
$(eval $(call SetupTarget, eclipse-shared-hotspot-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make WORKSPACE=hotspot SHARED=true, \
))
$(eval $(call SetupTarget, eclipse-shared-native-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make WORKSPACE=native SHARED=true, \
))
$(eval $(call SetupTarget, eclipse-shared-mixed-env, \
MAKEFILE := ide/eclipse/CreateWorkspace, \
ARGS := --always-make SHARED=true, \
))
ALL_TARGETS += $(HOTSPOT_VARIANT_TARGETS) $(HOTSPOT_VARIANT_GENSRC_TARGETS) \
$(HOTSPOT_VARIANT_LIBS_TARGETS)
@@ -506,15 +466,15 @@ ALL_TARGETS += bootcycle-images
# Docs targets
# If building full docs, to complete docs-*-api we need both the javadoc and
# graphs targets.
# modulegraph targets.
$(eval $(call SetupTarget, docs-jdk-api-javadoc, \
MAKEFILE := Docs, \
TARGET := docs-jdk-api-javadoc, \
))
$(eval $(call SetupTarget, docs-jdk-api-graphs, \
$(eval $(call SetupTarget, docs-jdk-api-modulegraph, \
MAKEFILE := Docs, \
TARGET := docs-jdk-api-graphs, \
TARGET := docs-jdk-api-modulegraph, \
DEPS := buildtools-modules runnable-buildjdk, \
))
@@ -523,9 +483,9 @@ $(eval $(call SetupTarget, docs-javase-api-javadoc, \
TARGET := docs-javase-api-javadoc, \
))
$(eval $(call SetupTarget, docs-javase-api-graphs, \
$(eval $(call SetupTarget, docs-javase-api-modulegraph, \
MAKEFILE := Docs, \
TARGET := docs-javase-api-graphs, \
TARGET := docs-javase-api-modulegraph, \
DEPS := buildtools-modules runnable-buildjdk, \
))
@@ -534,9 +494,9 @@ $(eval $(call SetupTarget, docs-reference-api-javadoc, \
TARGET := docs-reference-api-javadoc, \
))
$(eval $(call SetupTarget, docs-reference-api-graphs, \
$(eval $(call SetupTarget, docs-reference-api-modulegraph, \
MAKEFILE := Docs, \
TARGET := docs-reference-api-graphs, \
TARGET := docs-reference-api-modulegraph, \
DEPS := buildtools-modules runnable-buildjdk, \
))
@@ -747,22 +707,6 @@ ifeq ($(BUILD_FAILURE_HANDLER), true)
))
endif
ifeq ($(BUILD_JTREG_TEST_THREAD_FACTORY), true)
# Builds the test thread factory jtreg extension
$(eval $(call SetupTarget, build-test-test-thread-factory, \
MAKEFILE := test/BuildJtregTestThreadFactory, \
TARGET := build, \
DEPS := interim-langtools exploded-image, \
))
# Copies the jtreg test thread factory into the test image
$(eval $(call SetupTarget, test-image-test-thread-factory, \
MAKEFILE := test/BuildJtregTestThreadFactory, \
TARGET := images, \
DEPS := build-test-test-thread-factory, \
))
endif
$(eval $(call SetupTarget, build-microbenchmark, \
MAKEFILE := test/BuildMicrobenchmark, \
DEPS := interim-langtools exploded-image, \
@@ -844,6 +788,14 @@ ifeq ($(JCOV_ENABLED), true)
))
endif
################################################################################
# Install targets
$(eval $(call SetupTarget, install, \
MAKEFILE := Install, \
DEPS := product-images, \
))
################################################################################
#
# Dependency declarations between targets.
@@ -1155,14 +1107,9 @@ docs-reference-api: docs-reference-api-javadoc
# If we're building full docs, we must also generate the module graphs to
# get non-broken api documentation.
ifeq ($(ENABLE_FULL_DOCS), true)
docs-jdk-api: docs-jdk-api-graphs
docs-javase-api: docs-javase-api-graphs
docs-reference-api: docs-reference-api-graphs
# We must generate javadoc first so we know what graphs are needed
docs-jdk-api-graphs: docs-jdk-api-javadoc
docs-javase-api-graphs: docs-javase-api-javadoc
docs-reference-api-graphs: docs-reference-api-javadoc
docs-jdk-api: docs-jdk-api-modulegraph
docs-javase-api: docs-javase-api-modulegraph
docs-reference-api: docs-reference-api-modulegraph
endif
docs-jdk: docs-jdk-api docs-jdk-specs docs-jdk-index
@@ -1235,10 +1182,6 @@ ifeq ($(BUILD_FAILURE_HANDLER), true)
test-image: test-image-failure-handler
endif
ifeq ($(BUILD_JTREG_TEST_THREAD_FACTORY), true)
test-image: test-image-test-thread-factory
endif
ifneq ($(JMH_CORE_JAR), )
test-image: build-microbenchmark
endif


@@ -51,7 +51,6 @@ define create-info-file
$(if $(VENDOR_VERSION_STRING), \
$(call info-file-item, "IMPLEMENTOR_VERSION", "$(VENDOR_VERSION_STRING)"))
$(call info-file-item, "JAVA_VERSION_DATE", "$(VERSION_DATE)")
$(call info-file-item, "JAVA_RUNTIME_VERSION", "$(VERSION_STRING)")
$(call info-file-item, "OS_NAME", "$(RELEASE_FILE_OS_NAME)")
$(call info-file-item, "OS_ARCH", "$(RELEASE_FILE_OS_ARCH)")
$(call info-file-item, "LIBC", "$(RELEASE_FILE_LIBC)")


@@ -1,5 +1,5 @@
#
# Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -93,9 +93,6 @@ endif
JTREG_FAILURE_HANDLER_DIR := $(TEST_IMAGE_DIR)/failure_handler
JTREG_FAILURE_HANDLER := $(JTREG_FAILURE_HANDLER_DIR)/jtregFailureHandler.jar
JTREG_TEST_THREAD_FACTORY_DIR := $(TEST_IMAGE_DIR)/jtreg_test_thread_factory
JTREG_TEST_THREAD_FACTORY_JAR := $(JTREG_TEST_THREAD_FACTORY_DIR)/jtregTestThreadFactory.jar
JTREG_FAILURE_HANDLER_TIMEOUT ?= 0
ifneq ($(wildcard $(JTREG_FAILURE_HANDLER)), )
@@ -178,8 +175,7 @@ ifeq ($(TEST_JOBS), 0)
c = c * $(TEST_JOBS_FACTOR_JDL); \
c = c * $(TEST_JOBS_FACTOR_MACHINE); \
if (c < 1) c = 1; \
c = c + 0.5; \
printf "%d", c; \
printf "%.0f", c; \
}')
endif
@@ -200,12 +196,11 @@ $(eval $(call SetTestOpt,JAVA_OPTIONS,JTREG))
$(eval $(call SetTestOpt,JOBS,JTREG))
$(eval $(call SetTestOpt,TIMEOUT_FACTOR,JTREG))
$(eval $(call SetTestOpt,FAILURE_HANDLER_TIMEOUT,JTREG))
$(eval $(call SetTestOpt,REPORT,JTREG))
$(eval $(call ParseKeywordVariable, JTREG, \
SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR FAILURE_HANDLER_TIMEOUT \
TEST_MODE ASSERT VERBOSE RETAIN TEST_THREAD_FACTORY MAX_MEM RUN_PROBLEM_LISTS \
RETRY_COUNT REPEAT_COUNT MAX_OUTPUT REPORT $(CUSTOM_JTREG_SINGLE_KEYWORDS), \
TEST_MODE ASSERT VERBOSE RETAIN MAX_MEM RUN_PROBLEM_LISTS \
RETRY_COUNT REPEAT_COUNT MAX_OUTPUT $(CUSTOM_JTREG_SINGLE_KEYWORDS), \
STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS KEYWORDS \
EXTRA_PROBLEM_LISTS LAUNCHER_OPTIONS \
$(CUSTOM_JTREG_STRING_KEYWORDS), \
@@ -357,7 +352,7 @@ ExpandJtregPath = \
# with test id: dir/Test.java#selection -> Test.java#selection -> .java#selection -> #selection
# without: dir/Test.java -> Test.java -> .java -> <<empty string>>
TestID = \
$(subst .jasm,,$(subst .sh,,$(subst .html,,$(subst .java,,$(suffix $(notdir $1))))))
$(subst .java,,$(suffix $(notdir $1)))
# The test id starting with a hash (#testid) will be stripped by all
# evals in ParseJtregTestSelectionInner and will be reinserted by calling
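
A standalone sketch of the multi-suffix TestID variant above, with invented test names, showing how notdir/suffix/subst reduce a test path to just its optional #id part (the backslash escapes the literal hash in a makefile):

    # testid-demo.mk -- run with: make -f testid-demo.mk
    TestID = $(subst .jasm,,$(subst .sh,,$(subst .html,,$(subst .java,,$(suffix $(notdir $1))))))
    # Prints: [#id1]
    $(info [$(call TestID,gc/TestFoo.java\#id1)])
    # Prints: [] (empty when there is no test id)
    $(info [$(call TestID,jdk/TestBar.sh)])
    all: ;
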
@@ -602,7 +597,7 @@ define SetupRunMicroTestBody
$1_JMH_JVM_ARGS += $$(MICRO_VM_OPTIONS) $$(MICRO_JAVA_OPTIONS)
endif
$1_MICRO_VM_OPTIONS := -jvmArgsPrepend $(call ShellQuote,$$($1_JMH_JVM_ARGS))
$1_MICRO_VM_OPTIONS := -jvmArgs $(call ShellQuote,$$($1_JMH_JVM_ARGS))
ifneq ($$(MICRO_ITER), )
$1_MICRO_ITER := -i $$(MICRO_ITER)
@@ -743,13 +738,13 @@ define SetupRunJtregTestBody
# we may end up with a lot of JVM's
$1_JTREG_MAX_RAM_PERCENTAGE := $$(shell $(AWK) 'BEGIN { print 25 / $$($1_JTREG_JOBS); }')
JTREG_TIMEOUT_FACTOR ?= 4
JTREG_VERBOSE ?= fail,error,summary
JTREG_RETAIN ?= fail,error
JTREG_TEST_THREAD_FACTORY ?=
JTREG_RUN_PROBLEM_LISTS ?= false
JTREG_RETRY_COUNT ?= 0
JTREG_REPEAT_COUNT ?= 0
JTREG_REPORT ?= files
ifneq ($$(JTREG_RETRY_COUNT), 0)
ifneq ($$(JTREG_REPEAT_COUNT), 0)
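
For concreteness, the MaxRAMPercentage arithmetic above divides a 25% overall RAM budget among the concurrently running jtreg JVMs, so with four jobs each test JVM is capped at 6.25% of RAM. A hypothetical standalone check with an invented job count:

    # ram-demo.mk -- run with: make -f ram-demo.mk
    JTREG_JOBS := 4
    MAX_RAM_PERCENTAGE := $(shell awk 'BEGIN { print 25 / $(JTREG_JOBS); }')
    # Prints: -XX:MaxRAMPercentage=6.25
    $(info -XX:MaxRAMPercentage=$(MAX_RAM_PERCENTAGE))
    all: ;
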
@@ -759,14 +754,6 @@ define SetupRunJtregTestBody
endif
endif
ifneq ($$(JTREG_TEST_THREAD_FACTORY), )
$1_JTREG_BASIC_OPTIONS += -testThreadFactoryPath:$$(JTREG_TEST_THREAD_FACTORY_JAR)
$1_JTREG_BASIC_OPTIONS += -testThreadFactory:$$(JTREG_TEST_THREAD_FACTORY)
$1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$(wildcard \
$$(addprefix $$($1_TEST_ROOT)/, ProblemList-$$(JTREG_TEST_THREAD_FACTORY).txt) \
))
endif
ifneq ($$(JTREG_LAUNCHER_OPTIONS), )
$1_JTREG_LAUNCHER_OPTIONS += $$(JTREG_LAUNCHER_OPTIONS)
endif
@@ -783,13 +770,10 @@ define SetupRunJtregTestBody
# Make sure the tmp dir is normalized as some tests will react badly otherwise
$1_TEST_TMP_DIR := $$(abspath $$($1_TEST_SUPPORT_DIR)/tmp)
# test.boot.jdk is used by some test cases that want to execute a previous
# version of the JDK.
$1_JTREG_BASIC_OPTIONS += -$$($1_JTREG_TEST_MODE) \
-verbose:$$(JTREG_VERBOSE) -retain:$$(JTREG_RETAIN) \
-concurrency:$$($1_JTREG_JOBS) -timeoutFactor:$$(JTREG_TIMEOUT_FACTOR) \
-vmoption:-XX:MaxRAMPercentage=$$($1_JTREG_MAX_RAM_PERCENTAGE) \
-vmoption:-Dtest.boot.jdk="$$(BOOT_JDK)" \
-vmoption:-Djava.io.tmpdir="$$($1_TEST_TMP_DIR)"
$1_JTREG_BASIC_OPTIONS += -automatic -ignore:quiet
@@ -799,10 +783,8 @@ define SetupRunJtregTestBody
$1_JTREG_BASIC_OPTIONS += -e:JIB_DATA_DIR
# If running on Windows, propagate the _NT_SYMBOL_PATH to enable
# symbol lookup in hserr files
# The minidumps are disabled by default on client Windows, so enable them
ifeq ($$(call isTargetOs, windows), true)
$1_JTREG_BASIC_OPTIONS += -e:_NT_SYMBOL_PATH
$1_JTREG_BASIC_OPTIONS += -vmoption:-XX:+CreateCoredumpOnCrash
else ifeq ($$(call isTargetOs, linux), true)
$1_JTREG_BASIC_OPTIONS += -e:_JVM_DWARF_PATH=$$(SYMBOLS_IMAGE_DIR)
endif
@@ -830,28 +812,6 @@ define SetupRunJtregTestBody
$1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$($1_JTREG_PROBLEM_LIST))
endif
JTREG_ALL_OPTIONS := $$(JTREG_JAVA_OPTIONS) $$(JTREG_VM_OPTIONS)
JTREG_AUTO_PROBLEM_LISTS :=
JTREG_AUTO_TIMEOUT_FACTOR := 4
ifneq ($$(findstring -Xcomp, $$(JTREG_ALL_OPTIONS)), )
JTREG_AUTO_PROBLEM_LISTS += ProblemList-Xcomp.txt
JTREG_AUTO_TIMEOUT_FACTOR := 10
endif
ifneq ($$(findstring -XX:+UseZGC, $$(JTREG_ALL_OPTIONS)), )
ifneq ($$(findstring -XX:-ZGenerational, $$(JTREG_ALL_OPTIONS)), )
JTREG_AUTO_PROBLEM_LISTS += ProblemList-zgc.txt
else
JTREG_AUTO_PROBLEM_LISTS += ProblemList-generational-zgc.txt
endif
endif
ifneq ($$(findstring -XX:+UseShenandoahGC, $$(JTREG_ALL_OPTIONS)), )
JTREG_AUTO_PROBLEM_LISTS += ProblemList-shenandoah.txt
endif
ifneq ($$(JTREG_EXTRA_PROBLEM_LISTS), )
# Accept both absolute paths as well as relative to the current test root.
$1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$(wildcard \
@@ -883,31 +843,17 @@ define SetupRunJtregTestBody
$$(eval $$(call SetupRunJtregTestCustom, $1))
# SetupRunJtregTestCustom might also adjust JTREG_AUTO_ variables
# so set the final results after setting values from custom setup
ifneq ($$(JTREG_AUTO_PROBLEM_LISTS), )
# Accept both absolute paths as well as relative to the current test root.
$1_JTREG_BASIC_OPTIONS += $$(addprefix $$(JTREG_PROBLEM_LIST_PREFIX), $$(wildcard \
$$(JTREG_AUTO_PROBLEM_LISTS) \
$$(addprefix $$($1_TEST_ROOT)/, $$(JTREG_AUTO_PROBLEM_LISTS)) \
))
endif
JTREG_TIMEOUT_FACTOR ?= $$(JTREG_AUTO_TIMEOUT_FACTOR)
clean-outputdirs-$1:
clean-workdir-$1:
$$(RM) -r $$($1_TEST_SUPPORT_DIR)
$$(RM) -r $$($1_TEST_RESULTS_DIR)
$1_COMMAND_LINE := \
$$(JTREG_JAVA) $$($1_JTREG_LAUNCHER_OPTIONS) \
$$(JAVA) $$($1_JTREG_LAUNCHER_OPTIONS) \
-Dprogram=jtreg -jar $$(JT_HOME)/lib/jtreg.jar \
$$($1_JTREG_BASIC_OPTIONS) \
-testjdk:$$(JDK_UNDER_TEST) \
-dir:$$(JTREG_TOPDIR) \
-reportDir:$$($1_TEST_RESULTS_DIR) \
-workDir:$$($1_TEST_SUPPORT_DIR) \
-report:$${JTREG_REPORT} \
$$$${JTREG_STATUS} \
$$(JTREG_OPTIONS) \
$$(JTREG_FAILURE_HANDLER_OPTIONS) \
@@ -943,7 +889,7 @@ define SetupRunJtregTestBody
done
endif
run-test-$1: pre-run-test clean-outputdirs-$1
run-test-$1: pre-run-test clean-workdir-$1
$$(call LogWarn)
$$(call LogWarn, Running test '$$($1_TEST)')
$$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR) \
@@ -980,9 +926,9 @@ define SetupRunJtregTestBody
$$(eval $1_TOTAL := 1) \
)
$1: run-test-$1 parse-test-$1 clean-outputdirs-$1
$1: run-test-$1 parse-test-$1 clean-workdir-$1
TARGETS += $1 run-test-$1 parse-test-$1 clean-outputdirs-$1
TARGETS += $1 run-test-$1 parse-test-$1 clean-workdir-$1
TEST_TARGETS += parse-test-$1
endef


@@ -122,7 +122,6 @@ $(eval $(call SetupVariable,JT_HOME))
$(eval $(call SetupVariable,JDK_IMAGE_DIR,$(OUTPUTDIR)/images/jdk))
$(eval $(call SetupVariable,TEST_IMAGE_DIR,$(OUTPUTDIR)/images/test))
$(eval $(call SetupVariable,SYMBOLS_IMAGE_DIR,$(OUTPUTDIR)/images/symbols,NO_CHECK))
$(eval $(call SetupVariable,JTREG_JDK,$(BOOT_JDK)))
# Provide default values for tools that we need
$(eval $(call SetupVariable,MAKE,make,NO_CHECK))
@@ -158,10 +157,6 @@ ifeq ($(UNAME_OS), CYGWIN)
OPENJDK_TARGET_OS := windows
OPENJDK_TARGET_OS_TYPE := windows
OPENJDK_TARGET_OS_ENV := windows.cygwin
else ifeq ($(UNAME_OS), MINGW64)
OPENJDK_TARGET_OS := windows
OPENJDK_TARGET_OS_TYPE := windows
OPENJDK_TARGET_OS_ENV := windows.msys2
else
OPENJDK_TARGET_OS_TYPE:=unix
ifeq ($(UNAME_OS), Linux)
@@ -174,9 +169,6 @@ else
OPENJDK_TARGET_OS_ENV := $(OPENJDK_TARGET_OS)
endif
# Sanity check env detection
$(info Detected target OS, type and env: [$(OPENJDK_TARGET_OS)] [$(OPENJDK_TARGET_OS_TYPE)] [$(OPENJDK_TARGET_OS_ENV)])
# Assume little endian unless otherwise specified
OPENJDK_TARGET_CPU_ENDIAN := little
@@ -216,9 +208,9 @@ else ifeq ($(OPENJDK_TARGET_OS), macosx)
else ifeq ($(OPENJDK_TARGET_OS), windows)
NUM_CORES := $(NUMBER_OF_PROCESSORS)
MEMORY_SIZE := $(shell \
$(EXPR) `powershell -Command \
"(Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory" \
| $(SED) 's/\\r//g' ` / 1024 / 1024 \
$(EXPR) `wmic computersystem get totalphysicalmemory -value \
| $(GREP) = | $(SED) 's/\\r//g' \
| $(CUT) -d "=" -f 2-` / 1024 / 1024 \
)
endif
ifeq ($(NUM_CORES), )
@@ -256,7 +248,6 @@ $(call CreateNewSpec, $(NEW_SPEC), \
TOPDIR := $(TOPDIR), \
OUTPUTDIR := $(OUTPUTDIR), \
BOOT_JDK := $(BOOT_JDK), \
JTREG_JDK := $(JTREG_JDK), \
JT_HOME := $(JT_HOME), \
JDK_IMAGE_DIR := $(JDK_IMAGE_DIR), \
JCOV_IMAGE_DIR := $(JCOV_IMAGE_DIR), \
@@ -304,11 +295,6 @@ test-prebuilt:
@cd $(TOPDIR) && $(MAKE) $(MAKE_ARGS) -f make/RunTests.gmk run-test \
TEST="$(TEST)"
test-prebuilt-with-exit-code: test-prebuilt
@if test -f $(MAKESUPPORT_OUTPUTDIR)/exit-with-error ; then \
exit 1 ; \
fi
all: test-prebuilt
.PHONY: default all test-prebuilt


@@ -124,8 +124,6 @@ JAR := $(FIXPATH) $(JAR_CMD)
JLINK := $(FIXPATH) $(JLINK_CMD)
JMOD := $(FIXPATH) $(JMOD_CMD)
JTREG_JAVA := $(FIXPATH) $(JTREG_JDK)/bin/java $(JAVA_FLAGS_BIG) $(JAVA_FLAGS)
BUILD_JAVA := $(JDK_IMAGE_DIR)/bin/JAVA
################################################################################
# Some common tools. Assume most common name and no path.


@@ -88,9 +88,9 @@ ifeq ($(call isTargetOs, windows), true)
$(eval $(call SetupZipArchive,BUILD_JGSS_BIN_ZIP, \
SRC := $(SUPPORT_OUTPUTDIR), \
INCLUDE_FILES := modules_libs/java.security.jgss/w2k_lsa_auth.dll \
modules_libs/java.security.jgss/w2k_lsa_auth.dll.diz \
modules_libs/java.security.jgss/w2k_lsa_auth.dll.map \
modules_libs/java.security.jgss/w2k_lsa_auth.dll.pdb, \
modules_libs/java.security.jgss/w2k_lsa_auth.diz \
modules_libs/java.security.jgss/w2k_lsa_auth.map \
modules_libs/java.security.jgss/w2k_lsa_auth.pdb, \
ZIP := $(IMAGES_OUTPUTDIR)/$(JGSS_ZIP_NAME)))
TARGETS += $(IMAGES_OUTPUTDIR)/$(JGSS_ZIP_NAME)


@@ -1,5 +1,5 @@
#
# Copyright (c) 2014, 2022, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -31,7 +31,6 @@ include JavaCompilation.gmk
include Modules.gmk
SRC_ZIP_WORK_DIR := $(SUPPORT_OUTPUTDIR)/src
$(if $(filter $(TOPDIR)/%, $(SUPPORT_OUTPUTDIR)), $(eval SRC_ZIP_BASE := $(TOPDIR)), $(eval SRC_ZIP_BASE := $(SUPPORT_OUTPUTDIR)))
# Hook to include the corresponding custom file, if present.
$(eval $(call IncludeCustomExtension, ZipSource.gmk))
@@ -46,10 +45,10 @@ ALL_MODULES := $(FindAllModules)
# again to create src.zip.
$(foreach m, $(ALL_MODULES), \
$(foreach d, $(call FindModuleSrcDirs, $m), \
$(eval $d_TARGET := $(SRC_ZIP_WORK_DIR)/$(patsubst $(TOPDIR)/%,%,$(patsubst $(SUPPORT_OUTPUTDIR)/%,%,$d))/$m) \
$(eval $d_TARGET := $(SRC_ZIP_WORK_DIR)/$(patsubst $(TOPDIR)/%,%,$d)/$m) \
$(if $(SRC_GENERATED), , \
$(eval $$($d_TARGET): $d ; \
$$(if $(filter $(SRC_ZIP_BASE)/%, $d), $$(link-file-relative), $$(link-file-absolute)) \
$$(if $(filter $(TOPDIR)/%, $d), $$(link-file-relative), $$(link-file-absolute)) \
) \
) \
$(eval SRC_ZIP_SRCS += $$($d_TARGET)) \
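
A hypothetical standalone trace of the nested patsubst shown above (all directories invented; single $ outside the real makefile's context): the inner patsubst relativizes a generated-source directory that lives under SUPPORT_OUTPUTDIR, and the outer one would do the same for source directories under TOPDIR.

    # srczip-demo.mk -- run with: make -f srczip-demo.mk
    TOPDIR            := /home/dev/jdk
    SUPPORT_OUTPUTDIR := /home/dev/jdk/build/support
    SRC_ZIP_WORK_DIR  := $(SUPPORT_OUTPUTDIR)/src
    d := $(SUPPORT_OUTPUTDIR)/gensrc/java.base
    m := java.base
    # Prints: /home/dev/jdk/build/support/src/gensrc/java.base/java.base
    $(info $(SRC_ZIP_WORK_DIR)/$(patsubst $(TOPDIR)/%,%,$(patsubst $(SUPPORT_OUTPUTDIR)/%,%,$(d)))/$(m))
    all: ;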


@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -31,11 +31,6 @@ AC_DEFUN_ONCE([BASIC_INIT],
[
# Save the original command line. This is passed to us by the wrapper configure script.
AC_SUBST(CONFIGURE_COMMAND_LINE)
# We might have the original command line if the wrapper was called by some
# other script.
AC_SUBST(REAL_CONFIGURE_COMMAND_EXEC_SHORT)
AC_SUBST(REAL_CONFIGURE_COMMAND_EXEC_FULL)
AC_SUBST(REAL_CONFIGURE_COMMAND_LINE)
# AUTOCONF might be set in the environment by the user. Preserve for "make reconfigure".
AC_SUBST(AUTOCONF)
# Save the path variable before it gets changed
@@ -60,7 +55,6 @@ AC_DEFUN([BASIC_CHECK_LEFTOVER_OVERRIDDEN],
###############################################################################
# Setup basic configuration paths, and platform-specific stuff related to PATHs.
# Make sure to only use tools set up in BASIC_SETUP_FUNDAMENTAL_TOOLS.
AC_DEFUN_ONCE([BASIC_SETUP_PATHS],
[
# Save the current directory this script was started from
@@ -102,29 +96,6 @@ AC_DEFUN_ONCE([BASIC_SETUP_PATHS],
AUTOCONF_DIR=$TOPDIR/make/autoconf
])
###############################################################################
# Setup what kind of build environment type we have (CI or local developer)
AC_DEFUN_ONCE([BASIC_SETUP_BUILD_ENV],
[
if test "x$CI" = "xtrue"; then
DEFAULT_BUILD_ENV="ci"
AC_MSG_NOTICE([CI environment variable set to $CI])
else
DEFAULT_BUILD_ENV="dev"
fi
UTIL_ARG_WITH(NAME: build-env, TYPE: literal,
RESULT: BUILD_ENV,
VALID_VALUES: [auto dev ci], DEFAULT: auto,
CHECKING_MSG: [for build environment type],
DESC: [select build environment type (affects certain default values)],
IF_AUTO: [
RESULT=$DEFAULT_BUILD_ENV
]
)
AC_SUBST(BUILD_ENV)
])
###############################################################################
# Evaluates platform specific overrides for devkit variables.
# $1: Name of variable
@@ -168,15 +139,6 @@ AC_DEFUN([BASIC_SETUP_XCODE_SYSROOT],
if test $? -ne 0; then
AC_MSG_ERROR([The xcodebuild tool in the devkit reports an error: $XCODEBUILD_OUTPUT])
fi
elif test "x$TOOLCHAIN_PATH" != x; then
UTIL_LOOKUP_PROGS(XCODEBUILD, xcodebuild, $TOOLCHAIN_PATH)
if test "x$XCODEBUILD" != x; then
XCODEBUILD_OUTPUT=`"$XCODEBUILD" -version 2>&1`
if test $? -ne 0; then
AC_MSG_WARN([Ignoring the located xcodebuild tool $XCODEBUILD due to an error: $XCODEBUILD_OUTPUT])
XCODEBUILD=
fi
fi
else
UTIL_LOOKUP_PROGS(XCODEBUILD, xcodebuild)
if test "x$XCODEBUILD" != x; then
@@ -326,22 +288,6 @@ AC_DEFUN_ONCE([BASIC_SETUP_DEVKIT],
[UTIL_PREPEND_TO_PATH([TOOLCHAIN_PATH],$with_toolchain_path)]
)
AC_ARG_WITH([xcode-path], [AS_HELP_STRING([--with-xcode-path],
[set up toolchain on Mac OS using a path to an Xcode installation])])
if test "x$with_xcode_path" != x; then
if test "x$OPENJDK_BUILD_OS" = "xmacosx"; then
UTIL_PREPEND_TO_PATH([TOOLCHAIN_PATH],
$with_xcode_path/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin:$with_xcode_path/Contents/Developer/usr/bin)
else
AC_MSG_WARN([Option --with-xcode-path is only valid on Mac OS, ignoring.])
fi
fi
AC_MSG_CHECKING([for toolchain path])
AC_MSG_RESULT([$TOOLCHAIN_PATH])
AC_SUBST(TOOLCHAIN_PATH)
AC_ARG_WITH([extra-path], [AS_HELP_STRING([--with-extra-path],
[prepend these directories to the default path])],
[UTIL_PREPEND_TO_PATH([EXTRA_PATH],$with_extra_path)]
@@ -360,6 +306,10 @@ AC_DEFUN_ONCE([BASIC_SETUP_DEVKIT],
AC_MSG_RESULT([$SYSROOT])
AC_SUBST(SYSROOT)
AC_MSG_CHECKING([for toolchain path])
AC_MSG_RESULT([$TOOLCHAIN_PATH])
AC_SUBST(TOOLCHAIN_PATH)
AC_MSG_CHECKING([for extra path])
AC_MSG_RESULT([$EXTRA_PATH])
])
@@ -406,9 +356,9 @@ AC_DEFUN_ONCE([BASIC_SETUP_OUTPUT_DIR],
# WARNING: This might be a bad thing to do. You need to be sure you want to
# have a configuration in this directory. Do some sanity checks!
if test ! -e "$OUTPUTDIR/spec.gmk" && test ! -e "$OUTPUTDIR/configure-support/generated-configure.sh"; then
# If we have a spec.gmk or configure-support/generated-configure.sh,
# we have run here before and we are OK. Otherwise, check for other files
if test ! -e "$OUTPUTDIR/spec.gmk"; then
# If we have a spec.gmk, we have run here before and we are OK. Otherwise, check for
# other files
files_present=`$LS $OUTPUTDIR`
# Configure has already touched config.log and confdefs.h in the current dir when this check
# is performed.
@@ -423,9 +373,8 @@ AC_DEFUN_ONCE([BASIC_SETUP_OUTPUT_DIR],
AC_MSG_NOTICE([Current directory is $CONFIGURE_START_DIR.])
AC_MSG_NOTICE([Since this is not the source root, configure will output the configuration here])
AC_MSG_NOTICE([(as opposed to creating a configuration in <src_root>/build/<conf-name>).])
AC_MSG_NOTICE([However, this directory is not empty, additionally to some allowed files])
AC_MSG_NOTICE([it contains $filtered_files.])
AC_MSG_NOTICE([This is not allowed, since it could seriously mess up just about everything.])
AC_MSG_NOTICE([However, this directory is not empty. This is not allowed, since it could])
AC_MSG_NOTICE([seriously mess up just about everything.])
AC_MSG_NOTICE([Try 'cd $TOPDIR' and restart configure])
AC_MSG_NOTICE([(or create a new empty directory and cd to it).])
AC_MSG_ERROR([Will not continue creating configuration in $CONFIGURE_START_DIR])
@@ -479,11 +428,7 @@ AC_DEFUN([BASIC_CHECK_DIR_ON_LOCAL_DISK],
# df on AIX does not understand -l. On modern AIXes it understands "-T local" which
# is the same. On older AIXes we just continue to live with a "not local build" warning.
if test "x$OPENJDK_TARGET_OS" = xaix; then
if "$DF -T local > /dev/null 2>&1"; then
DF_LOCAL_ONLY_OPTION='-T local'
else # AIX may use GNU-utils instead
DF_LOCAL_ONLY_OPTION='-l'
fi
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl1"; then
# In WSL1, we can only build on a drvfs file system (that is, a mounted real Windows drive)
DF_LOCAL_ONLY_OPTION='-t drvfs'


@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -24,13 +24,8 @@
#
###############################################################################
# It is recommended to use exactly this version of pandoc, especially for
# re-generating checked in html files
RECOMMENDED_PANDOC_VERSION=2.19.2
###############################################################################
# Setup the most fundamental tools, used for setting up build platform and
# path handling.
# Setup the most fundamental tools that relies on not much else to set up,
# but is used by much of the early bootstrap code.
AC_DEFUN_ONCE([BASIC_SETUP_FUNDAMENTAL_TOOLS],
[
# Bootstrapping: These tools are needed by UTIL_LOOKUP_PROGS
@@ -42,28 +37,7 @@ AC_DEFUN_ONCE([BASIC_SETUP_FUNDAMENTAL_TOOLS],
UTIL_CHECK_NONEMPTY(FILE)
AC_PATH_PROGS(LDD, ldd)
# Required tools
UTIL_REQUIRE_PROGS(ECHO, echo)
UTIL_REQUIRE_PROGS(TR, tr)
UTIL_REQUIRE_PROGS(UNAME, uname)
UTIL_REQUIRE_PROGS(WC, wc)
# Required tools with some special treatment
UTIL_REQUIRE_SPECIAL(GREP, [AC_PROG_GREP])
UTIL_REQUIRE_SPECIAL(EGREP, [AC_PROG_EGREP])
UTIL_REQUIRE_SPECIAL(SED, [AC_PROG_SED])
# Tools only needed on some platforms
UTIL_LOOKUP_PROGS(PATHTOOL, cygpath wslpath)
UTIL_LOOKUP_PROGS(CMD, cmd.exe, $PATH:/cygdrive/c/windows/system32:/mnt/c/windows/system32:/c/windows/system32)
])
###############################################################################
# Setup further tools that should be resolved early but after setting up
# build platform and path handling.
AC_DEFUN_ONCE([BASIC_SETUP_TOOLS],
[
# Required tools
# First are all the fundamental required tools.
UTIL_REQUIRE_PROGS(BASH, bash)
UTIL_REQUIRE_PROGS(CAT, cat)
UTIL_REQUIRE_PROGS(CHMOD, chmod)
@@ -71,6 +45,7 @@ AC_DEFUN_ONCE([BASIC_SETUP_TOOLS],
UTIL_REQUIRE_PROGS(CUT, cut)
UTIL_REQUIRE_PROGS(DATE, date)
UTIL_REQUIRE_PROGS(DIFF, gdiff diff)
UTIL_REQUIRE_PROGS(ECHO, echo)
UTIL_REQUIRE_PROGS(EXPR, expr)
UTIL_REQUIRE_PROGS(FIND, find)
UTIL_REQUIRE_PROGS(GUNZIP, gunzip)
@@ -92,20 +67,27 @@ AC_DEFUN_ONCE([BASIC_SETUP_TOOLS],
UTIL_REQUIRE_PROGS(TAR, gtar tar)
UTIL_REQUIRE_PROGS(TEE, tee)
UTIL_REQUIRE_PROGS(TOUCH, touch)
UTIL_REQUIRE_PROGS(TR, tr)
UTIL_REQUIRE_PROGS(UNAME, uname)
UTIL_REQUIRE_PROGS(WC, wc)
UTIL_REQUIRE_PROGS(XARGS, xargs)
# Required tools with some special treatment
# Then required tools that require some special treatment.
UTIL_REQUIRE_SPECIAL(GREP, [AC_PROG_GREP])
UTIL_REQUIRE_SPECIAL(EGREP, [AC_PROG_EGREP])
UTIL_REQUIRE_SPECIAL(FGREP, [AC_PROG_FGREP])
UTIL_REQUIRE_SPECIAL(SED, [AC_PROG_SED])
# Optional tools, we can do without them
UTIL_LOOKUP_PROGS(DF, df)
UTIL_LOOKUP_PROGS(GIT, git)
UTIL_LOOKUP_PROGS(NICE, nice)
UTIL_LOOKUP_PROGS(READLINK, greadlink readlink)
UTIL_LOOKUP_PROGS(WHOAMI, whoami)
# Tools only needed on some platforms
# These are only needed on some platforms
UTIL_LOOKUP_PROGS(PATHTOOL, cygpath wslpath)
UTIL_LOOKUP_PROGS(LSB_RELEASE, lsb_release)
UTIL_LOOKUP_PROGS(CMD, cmd.exe, $PATH:/cygdrive/c/windows/system32:/mnt/c/windows/system32:/c/windows/system32)
# For compare.sh only
UTIL_LOOKUP_PROGS(CMP, cmp)
@@ -298,7 +280,7 @@ AC_DEFUN([BASIC_CHECK_TAR],
if test "x$TAR_TYPE" = "xgnu"; then
TAR_INCLUDE_PARAM="T"
TAR_SUPPORTS_TRANSFORM="true"
elif test "x$TAR_TYPE" = "xaix"; then
elif test "x$TAR_TYPE" = "aix"; then
# -L InputList of aix tar: name of file listing the files and directories
# that need to be archived or extracted
TAR_INCLUDE_PARAM="L"
@@ -444,29 +426,22 @@ AC_DEFUN_ONCE([BASIC_SETUP_PANDOC],
[
UTIL_LOOKUP_PROGS(PANDOC, pandoc)
if test "x$PANDOC" != x; then
AC_MSG_CHECKING([for pandoc version])
PANDOC_VERSION=`$PANDOC --version 2>&1 | $TR -d '\r' | $HEAD -1 | $CUT -d " " -f 2`
AC_MSG_RESULT([$PANDOC_VERSION])
if test "x$PANDOC_VERSION" != x$RECOMMENDED_PANDOC_VERSION; then
AC_MSG_WARN([pandoc is version $PANDOC_VERSION, not the recommended version $RECOMMENDED_PANDOC_VERSION])
fi
PANDOC_MARKDOWN_FLAG="markdown"
AC_MSG_CHECKING([if the pandoc smart extension needs to be disabled for markdown])
if $PANDOC --list-extensions | $GREP -q '+smart'; then
if test -n "$PANDOC"; then
AC_MSG_CHECKING(if the pandoc smart extension needs to be disabled for markdown)
if $PANDOC --list-extensions | $GREP -q '\+smart'; then
AC_MSG_RESULT([yes])
PANDOC_MARKDOWN_FLAG="markdown-smart"
else
AC_MSG_RESULT([no])
fi
fi
if test -n "$PANDOC"; then
ENABLE_PANDOC="true"
else
ENABLE_PANDOC="false"
fi
AC_SUBST(ENABLE_PANDOC)
AC_SUBST(PANDOC_MARKDOWN_FLAG)
])


@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -382,7 +382,7 @@ AC_DEFUN_ONCE([BOOTJDK_SETUP_BOOT_JDK],
# Finally, set some other options...
# Determine if the boot jdk jar supports the --date option
if $JAR --help 2>&1 | $GREP -q -e "--date=TIMESTAMP"; then
if $JAR --help 2>&1 | $GREP -q "\-\-date=TIMESTAMP"; then
BOOT_JDK_JAR_SUPPORTS_DATE=true
else
BOOT_JDK_JAR_SUPPORTS_DATE=false

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
#!/bin/sh
#
# Copyright (c) 2012, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2012, 2022, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2021, Azul Systems, Inc. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
@@ -29,27 +29,16 @@
# and fix the broken property, if needed.
DIR=`dirname $0`
OUT=`. $DIR/autoconf-config.guess 2> /dev/null`
OUT=`. $DIR/autoconf-config.guess`
# Handle some cases that autoconf-config.guess is not capable of
if [ "x$OUT" = x ]; then
if [ `uname -s` = Linux ]; then
# Test and fix little endian MIPS.
if [ `uname -m` = mipsel ]; then
OUT=mipsel-unknown-linux-gnu
fi
# Test and fix cygwin machine arch .x86_64
elif [[ `uname -s` = CYGWIN* ]]; then
if [ `uname -m` = ".x86_64" ]; then
OUT=x86_64-unknown-cygwin
fi
fi
if [ "x$OUT" = x ]; then
# Run autoconf-config.guess again to get the error message.
. $DIR/autoconf-config.guess > /dev/null
else
printf "guessed by custom config.guess... " >&2
# Detect C library.
# Use '-gnu' suffix on systems that use glibc.
# Use '-musl' suffix on systems that use the musl libc.
echo $OUT | grep -- -linux- > /dev/null 2> /dev/null
if test $? = 0; then
libc_vendor=`ldd --version 2>&1 | sed -n '1s/.*\(musl\).*/\1/p'`
if [ x"${libc_vendor}" = x"musl" ]; then
OUT=`echo $OUT | sed 's/-gnu/-musl/'`
fi
fi
@@ -68,11 +57,11 @@ if test $? = 0; then
fi
# Test and fix wsl
echo $OUT | grep '\(unknown\|pc\)-linux-gnu' > /dev/null 2> /dev/null
echo $OUT | grep unknown-linux-gnu > /dev/null 2> /dev/null
if test $? = 0; then
uname -r | grep -i microsoft > /dev/null 2> /dev/null
if test $? = 0; then
OUT=`echo $OUT | sed -e 's/\(unknown\|pc\)-linux-gnu/pc-wsl/'`
OUT=`echo $OUT | sed -e 's/unknown-linux-gnu/pc-wsl/'`
fi
fi
@@ -92,6 +81,57 @@ if test $? = 0; then
OUT=powerpc$KERNEL_BITMODE`echo $OUT | sed -e 's/[^-]*//'`
fi
# Test and fix little endian PowerPC64.
# TODO: should be handled by autoconf-config.guess.
if [ "x$OUT" = x ]; then
if [ `uname -m` = ppc64le ]; then
if [ `uname -s` = Linux ]; then
OUT=powerpc64le-unknown-linux-gnu
fi
fi
fi
# Test and fix little endian MIPS.
if [ "x$OUT" = x ]; then
if [ `uname -s` = Linux ]; then
if [ `uname -m` = mipsel ]; then
OUT=mipsel-unknown-linux-gnu
elif [ `uname -m` = mips64el ]; then
OUT=mips64el-unknown-linux-gnu
fi
fi
fi
# Test and fix LoongArch64.
if [ "x$OUT" = x ]; then
if [ `uname -s` = Linux ]; then
if [ `uname -m` = loongarch64 ]; then
OUT=loongarch64-unknown-linux-gnu
fi
fi
fi
# Test and fix RISC-V.
if [ "x$OUT" = x ]; then
if [ `uname -s` = Linux ]; then
if [ `uname -m` = riscv64 ]; then
OUT=riscv64-unknown-linux-gnu
fi
fi
fi
# Test and fix cpu on macos-aarch64, uname -p reports arm, buildsys expects aarch64
echo $OUT | grep arm-apple-darwin > /dev/null 2> /dev/null
if test $? != 0; then
# The GNU version of uname may be on the PATH which reports arm64 instead
echo $OUT | grep arm64-apple-darwin > /dev/null 2> /dev/null
fi
if test $? = 0; then
if [ `uname -m` = arm64 ]; then
OUT=aarch64`echo $OUT | sed -e 's/[^-]*//'`
fi
fi
# Test and fix cpu on Macosx when C preprocessor is not on the path
echo $OUT | grep i386-apple-darwin > /dev/null 2> /dev/null
if test $? = 0; then

View File

@@ -1,6 +1,6 @@
#!/bin/sh
#
# Copyright (c) 2014, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -29,16 +29,46 @@
DIR=`dirname $0`
if echo $* | grep linux-musl >/dev/null ; then
echo $*
exit
fi
# Allow wsl
if echo $* | grep pc-wsl >/dev/null ; then
echo $*
exit
fi
# Allow msys2
if echo $* | grep pc-msys >/dev/null ; then
echo $*
exit
fi
# Canonicalize for riscv which autoconf-config.sub doesn't handle
if echo $* | grep '^riscv\(32\|64\)-linux' >/dev/null ; then
result=`echo $@ | sed 's/linux/unknown-linux/'`
echo $result
exit
fi
# Filter out everything that doesn't begin with "aarch64-"
if ! echo $* | grep '^aarch64-' >/dev/null ; then
. $DIR/autoconf-config.sub "$@"
# autoconf-config.sub exits, so we never reach here, but just in
# case we do:
exit
fi
while test $# -gt 0 ; do
case $1 in
-- ) # Stop option processing
shift; break ;;
aarch64-* )
config=`echo $1 | sed 's/^aarch64-/arm-/'`
sub_args="$sub_args $config"
shift; ;;
- ) # Use stdin as input.
sub_args="$sub_args $1"
shift; break ;;
@@ -51,5 +81,7 @@ done
result=`. $DIR/autoconf-config.sub $sub_args "$@"`
exitcode=$?
result=`echo $result | sed "s/^arm-/aarch64-/"`
echo $result
exit $exitcode
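The wrapper above works around the bundled autoconf-config.sub not recognizing aarch64 triplets: the CPU part is rewritten to arm- before delegating, and the prefix is restored on the result. A sketch of the round trip, with an illustrative triplet:

# Rewrite fed into autoconf-config.sub:
echo "aarch64-unknown-linux-gnu" | sed 's/^aarch64-/arm-/'   # -> arm-unknown-linux-gnu
# Restoration applied to its output:
echo "arm-unknown-linux-gnu" | sed 's/^arm-/aarch64-/'       # -> aarch64-unknown-linux-gnu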

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2025, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -26,29 +26,39 @@
AC_DEFUN([BPERF_CHECK_CORES],
[
AC_MSG_CHECKING([for number of cores])
NUM_CORES=1
FOUND_CORES=no
if test -f /proc/cpuinfo; then
# Looks like a Linux (or cygwin) system
NUM_CORES=`cat /proc/cpuinfo | grep -cw processor`
NUM_CORES=`cat /proc/cpuinfo | grep -c processor`
if test "$NUM_CORES" -eq "0"; then
NUM_CORES=`cat /proc/cpuinfo | grep -c ^CPU`
fi
if test "$NUM_CORES" -ne "0"; then
FOUND_CORES=yes
fi
elif test -x /usr/sbin/sysctl; then
# Looks like a MacOSX system
NUM_CORES=`/usr/sbin/sysctl -n hw.ncpu`
FOUND_CORES=yes
elif test "x$OPENJDK_BUILD_OS" = xaix ; then
NUM_CORES=`lparstat -m 2> /dev/null | $GREP -o "lcpu=[[0-9]]*" | $CUT -d "=" -f 2`
NUM_LCPU=`lparstat -m 2> /dev/null | $GREP -o "lcpu=[[0-9]]*" | $CUT -d "=" -f 2`
if test -n "$NUM_LCPU"; then
NUM_CORES=$NUM_LCPU
FOUND_CORES=yes
fi
elif test -n "$NUMBER_OF_PROCESSORS"; then
# On windows, look in the env
NUM_CORES=$NUMBER_OF_PROCESSORS
FOUND_CORES=yes
fi
if test "$NUM_CORES" -eq "0"; then
NUM_CORES=1
if test "x$FOUND_CORES" = xyes; then
AC_MSG_RESULT([$NUM_CORES])
else
AC_MSG_RESULT([could not detect number of cores, defaulting to 1])
AC_MSG_WARN([This will disable all parallelism from build!])
else
AC_MSG_RESULT([$NUM_CORES])
fi
])
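On the grep flags in the core-counting hunk above: a word match only counts lines where "processor" stands alone as a token, which (as an assumption, not stated in the hunk itself) guards against also counting summary lines such as "processors" that some architectures print in /proc/cpuinfo. The two probes, runnable by hand on Linux:

# Word-match count of "processor" lines:
grep -cw processor /proc/cpuinfo
# Fallback used when the first count is zero:
grep -c '^CPU' /proc/cpuinfo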
@@ -75,8 +85,7 @@ AC_DEFUN([BPERF_CHECK_MEMORY_SIZE],
FOUND_MEM=yes
elif test "x$OPENJDK_BUILD_OS" = xwindows; then
# Windows, but without cygwin
MEMORY_SIZE=`powershell -Command \
"(Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory" | $SED 's/\\r//g' `
MEMORY_SIZE=`wmic computersystem get totalphysicalmemory -value | grep = | cut -d "=" -f 2-`
MEMORY_SIZE=`expr $MEMORY_SIZE / 1024 / 1024`
FOUND_MEM=yes
fi

View File

@@ -103,7 +103,3 @@ JVM_FEATURES_server := cds compiler1 compiler2 g1gc serialgc
override EXTRA_CFLAGS :=
override EXTRA_CXXFLAGS :=
override EXTRA_LDFLAGS :=
# hsdis is not needed
HSDIS_BACKEND := none
ENABLE_HSDIS_BUNDLING := false

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -86,8 +86,6 @@ PLATFORM_SETUP_OPENJDK_BUILD_AND_TARGET
# Continue setting up basic stuff. Most remaining code require fundamental tools.
BASIC_SETUP_PATHS
BASIC_SETUP_TOOLS
BASIC_SETUP_BUILD_ENV
# Check if it's a pure open build or if custom sources are to be used.
JDKOPT_SETUP_OPEN_OR_CUSTOM
@@ -217,16 +215,6 @@ JDKOPT_SETUP_CODE_COVERAGE
# AddressSanitizer
JDKOPT_SETUP_ADDRESS_SANITIZER
# UndefinedBehaviorSanitizer
JDKOPT_SETUP_UNDEFINED_BEHAVIOR_SANITIZER
# LeakSanitizer
JDKOPT_SETUP_LEAK_SANITIZER
# Fallback linker
# This needs to go before 'LIB_DETERMINE_DEPENDENCIES'
JDKOPT_SETUP_FALLBACK_LINKER
###############################################################################
#
# Check dependencies for external and internal libraries.
@@ -254,7 +242,6 @@ HOTSPOT_SETUP_MISC
###############################################################################
LIB_TESTS_ENABLE_DISABLE_FAILURE_HANDLER
LIB_TESTS_ENABLE_DISABLE_JTREG_TEST_THREAD_FACTORY
JDKOPT_ENABLE_DISABLE_GENERATE_CLASSLIST
JDKOPT_EXCLUDE_TRANSLATIONS
@@ -313,11 +300,9 @@ AC_OUTPUT
# After AC_OUTPUT, we need to do final work
CUSTOM_CONFIG_OUTPUT_GENERATED_HOOK
BASIC_POST_CONFIG_OUTPUT
# Finally output some useful information to the user
HELP_PRINT_SUMMARY_AND_WARNINGS
CUSTOM_SUMMARY_AND_WARNINGS_HOOK
HELP_REPEAT_WARNINGS
# All output is done. Do the post-config output management.
BASIC_POST_CONFIG_OUTPUT

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2025, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -28,7 +28,7 @@
# Setup flags for C/C++ compiler
#
################################################################################
###############################################################################
#
# How to compile shared libraries.
#
@@ -37,10 +37,7 @@ AC_DEFUN([FLAGS_SETUP_SHARED_LIBS],
if test "x$TOOLCHAIN_TYPE" = xgcc; then
# Default works for linux, might work on other platforms as well.
SHARED_LIBRARY_FLAGS='-shared'
# --disable-new-dtags forces use of RPATH instead of RUNPATH for rpaths.
# This protects internal library dependencies within the JDK from being
# overridden using LD_LIBRARY_PATH. See JDK-8326891 for more information.
SET_EXECUTABLE_ORIGIN='-Wl,-rpath,\$$ORIGIN[$]1 -Wl,--disable-new-dtags'
SET_EXECUTABLE_ORIGIN='-Wl,-rpath,\$$ORIGIN[$]1'
SET_SHARED_LIBRARY_ORIGIN="-Wl,-z,origin $SET_EXECUTABLE_ORIGIN"
SET_SHARED_LIBRARY_NAME='-Wl,-soname=[$]1'
SET_SHARED_LIBRARY_MAPFILE='-Wl,-version-script=[$]1'
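On the --disable-new-dtags flag above: with new dtags disabled the linker records DT_RPATH rather than DT_RUNPATH, and DT_RPATH is searched before LD_LIBRARY_PATH, so the JDK's internal library references cannot be redirected through the environment. One way to verify on a built library (path is illustrative):

# Prints the (RPATH) or (RUNPATH) entry from the dynamic section.
readelf -d build/linux-x86_64-server-release/images/jdk/lib/libjava.so | grep -E 'RPATH|RUNPATH'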
@@ -54,21 +51,10 @@ AC_DEFUN([FLAGS_SETUP_SHARED_LIBS],
SET_SHARED_LIBRARY_NAME='-Wl,-install_name,@rpath/[$]1'
SET_SHARED_LIBRARY_MAPFILE='-Wl,-exported_symbols_list,[$]1'
elif test "x$OPENJDK_TARGET_OS" = xaix; then
# Linking is different on aix
SHARED_LIBRARY_FLAGS="-shared -Wl,-bM:SRE -Wl,-bnoentry"
SET_EXECUTABLE_ORIGIN=""
SET_SHARED_LIBRARY_ORIGIN=''
SET_SHARED_LIBRARY_NAME=''
SET_SHARED_LIBRARY_MAPFILE=''
else
# Default works for linux, might work on other platforms as well.
SHARED_LIBRARY_FLAGS='-shared'
SET_EXECUTABLE_ORIGIN='-Wl,-rpath,\$$ORIGIN[$]1'
if test "x$OPENJDK_TARGET_OS" = xlinux; then
SET_EXECUTABLE_ORIGIN="$SET_EXECUTABLE_ORIGIN -Wl,--disable-new-dtags"
fi
SET_SHARED_LIBRARY_NAME='-Wl,-soname=[$]1'
SET_SHARED_LIBRARY_MAPFILE='-Wl,-version-script=[$]1'
@@ -123,16 +109,6 @@ AC_DEFUN([FLAGS_SETUP_DEBUG_SYMBOLS],
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [${DEBUG_PREFIX_CFLAGS}],
IF_FALSE: [
DEBUG_PREFIX_CFLAGS=
],
IF_TRUE: [
# Add debug prefix map gcc system include paths, as they cause
# non-deterministic debug paths depending on gcc path location.
DEBUG_PREFIX_MAP_GCC_INCLUDE_PATHS
# Add debug prefix map for OUTPUTDIR to handle the scenario when
# it is not located within WORKSPACE_ROOT
outputdir_slash="${OUTPUTDIR%/}/"
DEBUG_PREFIX_CFLAGS="$DEBUG_PREFIX_CFLAGS -fdebug-prefix-map=${outputdir_slash}="
]
)
fi
@@ -152,12 +128,7 @@ AC_DEFUN([FLAGS_SETUP_DEBUG_SYMBOLS],
)
fi
# -gdwarf-4 and -gdwarf-aranges were introduced in clang 5.0
GDWARF_FLAGS="-gdwarf-4 -gdwarf-aranges"
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [${GDWARF_FLAGS}],
IF_FALSE: [GDWARF_FLAGS=""])
CFLAGS_DEBUG_SYMBOLS="-g ${GDWARF_FLAGS}"
CFLAGS_DEBUG_SYMBOLS="-g"
ASFLAGS_DEBUG_SYMBOLS="-g"
elif test "x$TOOLCHAIN_TYPE" = xxlc; then
CFLAGS_DEBUG_SYMBOLS="-g1"
@@ -174,55 +145,6 @@ AC_DEFUN([FLAGS_SETUP_DEBUG_SYMBOLS],
AC_SUBST(ASFLAGS_DEBUG_SYMBOLS)
])
# gcc will embed the full system include paths in the debug info
# resulting in non-deterministic debug symbol files and thus
# non-reproducible native libraries if gcc includes are located
# in different paths.
# Add -fdebug-prefix-map'ings for root and gcc include paths,
# pointing to a common set of folders so that the binaries are deterministic:
# root include : /usr/include
# gcc include : /usr/local/gcc_include
# g++ include : /usr/local/gxx_include
AC_DEFUN([DEBUG_PREFIX_MAP_GCC_INCLUDE_PATHS],
[
# Determine gcc system include paths.
# Assume default roots to start with:
GCC_ROOT_INCLUDE="/usr/include"
# Determine is sysroot or devkit specified?
if test "x$SYSROOT" != "x"; then
GCC_ROOT_INCLUDE="${SYSROOT%/}/usr/include"
fi
# Add root include mapping => /usr/include
GCC_INCLUDE_DEBUG_MAP_FLAGS="-fdebug-prefix-map=${GCC_ROOT_INCLUDE}/=/usr/include/"
# Add gcc system include mapping => /usr/local/gcc_include
# Find location of stddef.h using build C compiler
GCC_SYSTEM_INCLUDE=`$ECHO "#include <stddef.h>" | \
$CC $CFLAGS -v -E - 2>&1 | \
$GREP stddef | $TAIL -1 | $TR -s " " | $CUT -d'"' -f2`
if test "x$GCC_SYSTEM_INCLUDE" != "x"; then
GCC_SYSTEM_INCLUDE=`$DIRNAME $GCC_SYSTEM_INCLUDE`
GCC_INCLUDE_DEBUG_MAP_FLAGS="$GCC_INCLUDE_DEBUG_MAP_FLAGS \
-fdebug-prefix-map=${GCC_SYSTEM_INCLUDE}/=/usr/local/gcc_include/"
fi
# Add g++ system include mapping => /usr/local/gxx_include
# Find location of cstddef using build C++ compiler
GXX_SYSTEM_INCLUDE=`$ECHO "#include <cstddef>" | \
$CXX $CXXFLAGS -v -E -x c++ - 2>&1 | \
$GREP cstddef | $TAIL -1 | $TR -s " " | $CUT -d'"' -f2`
if test "x$GXX_SYSTEM_INCLUDE" != "x"; then
GXX_SYSTEM_INCLUDE=`$DIRNAME $GXX_SYSTEM_INCLUDE`
GCC_INCLUDE_DEBUG_MAP_FLAGS="$GCC_INCLUDE_DEBUG_MAP_FLAGS \
-fdebug-prefix-map=${GXX_SYSTEM_INCLUDE}/=/usr/local/gxx_include/"
fi
# Add to debug prefix cflags
DEBUG_PREFIX_CFLAGS="$DEBUG_PREFIX_CFLAGS $GCC_INCLUDE_DEBUG_MAP_FLAGS"
])
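The DEBUG_PREFIX_MAP_GCC_INCLUDE_PATHS macro above makes debug info reproducible by rewriting whatever system include paths the compiler actually uses into fixed names. The underlying flag simply maps one path prefix to another in the emitted DWARF; a minimal sketch with illustrative paths:

# Debug-info paths under the real include dir are recorded under /usr/local/gcc_include/ instead.
gcc -g -fdebug-prefix-map=/usr/lib/gcc/x86_64-linux-gnu/12/include/=/usr/local/gcc_include/ -c foo.c -o foo.o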
AC_DEFUN([FLAGS_SETUP_WARNINGS],
[
# Set default value.
@@ -262,10 +184,6 @@ AC_DEFUN([FLAGS_SETUP_WARNINGS],
WARNINGS_ENABLE_ALL_CXXFLAGS="$WARNINGS_ENABLE_ALL_CFLAGS $WARNINGS_ENABLE_ADDITIONAL_CXX"
DISABLED_WARNINGS="unused-parameter unused"
# gcc10/11 on ppc generate lots of abi warnings about layout of aggregates containing vectors
if test "x$OPENJDK_TARGET_CPU_ARCH" = "xppc"; then
DISABLED_WARNINGS="$DISABLED_WARNINGS psabi"
fi
;;
clang)
@@ -279,6 +197,7 @@ AC_DEFUN([FLAGS_SETUP_WARNINGS],
WARNINGS_ENABLE_ALL="-Wall -Wextra -Wformat=2 $WARNINGS_ENABLE_ADDITIONAL"
DISABLED_WARNINGS="unknown-warning-option unused-parameter unused"
;;
xlc)
@@ -356,15 +275,9 @@ AC_DEFUN([FLAGS_SETUP_OPTIMIZATION],
C_O_FLAG_NONE="${C_O_FLAG_NONE} ${DISABLE_FORTIFY_CFLAGS}"
fi
elif test "x$TOOLCHAIN_TYPE" = xclang; then
if test "x$OPENJDK_TARGET_OS" = xaix; then
C_O_FLAG_HIGHEST_JVM="-O3 -finline-functions"
C_O_FLAG_HIGHEST="-O3 -finline-functions"
C_O_FLAG_HI="-O3 -finline-functions"
else
C_O_FLAG_HIGHEST_JVM="-O3"
C_O_FLAG_HIGHEST="-O3"
C_O_FLAG_HI="-O3"
fi
C_O_FLAG_NORM="-O2"
C_O_FLAG_DEBUG_JVM="-O0"
C_O_FLAG_SIZE="-Os"
@@ -387,7 +300,7 @@ AC_DEFUN([FLAGS_SETUP_OPTIMIZATION],
C_O_FLAG_DEBUG="-Od"
C_O_FLAG_DEBUG_JVM=""
C_O_FLAG_NONE="-Od"
C_O_FLAG_SIZE="-O1"
C_O_FLAG_SIZE="-Os"
fi
# Now copy to C++ flags
@@ -490,13 +403,13 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
[
#### OS DEFINES, these should be independent on toolchain
if test "x$OPENJDK_TARGET_OS" = xlinux; then
CFLAGS_OS_DEF_JVM="-DLINUX -D_FILE_OFFSET_BITS=64"
CFLAGS_OS_DEF_JVM="-DLINUX"
CFLAGS_OS_DEF_JDK="-D_GNU_SOURCE -D_REENTRANT -D_LARGEFILE64_SOURCE"
elif test "x$OPENJDK_TARGET_OS" = xmacosx; then
CFLAGS_OS_DEF_JVM="-D_ALLBSD_SOURCE -D_DARWIN_C_SOURCE -D_XOPEN_SOURCE"
CFLAGS_OS_DEF_JDK="-D_ALLBSD_SOURCE -D_DARWIN_UNLIMITED_SELECT"
elif test "x$OPENJDK_TARGET_OS" = xaix; then
CFLAGS_OS_DEF_JVM="-DAIX -D_LARGE_FILES"
CFLAGS_OS_DEF_JVM="-DAIX"
elif test "x$OPENJDK_TARGET_OS" = xbsd; then
CFLAGS_OS_DEF_JDK="-D_ALLBSD_SOURCE"
elif test "x$OPENJDK_TARGET_OS" = xwindows; then
@@ -536,9 +449,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
# so for debug we build with '-qpic=large -bbigtoc'.
DEBUG_CFLAGS_JVM="-qpic=large"
fi
if test "x$TOOLCHAIN_TYPE" = xclang && test "x$OPENJDK_TARGET_OS" = xaix; then
DEBUG_CFLAGS_JVM="-fpic -mcmodel=large"
fi
fi
if test "x$DEBUG_LEVEL" != xrelease; then
@@ -561,7 +471,8 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
ALWAYS_DEFINES_JDK="-DWIN32_LEAN_AND_MEAN -D_WIN32_WINNT=0x0602 \
-D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE -DWIN32 -DIAL"
ALWAYS_DEFINES_JVM="-DNOMINMAX -DWIN32_LEAN_AND_MEAN -D_WIN32_WINNT=0x0602 \
-D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE"
-D_CRT_SECURE_NO_WARNINGS -D_CRT_NONSTDC_NO_DEPRECATE \
-D_WINSOCK_DEPRECATED_NO_WARNINGS"
fi
###############################################################################
@@ -574,14 +485,8 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
-fvisibility=hidden -fno-strict-aliasing -fno-omit-frame-pointer"
fi
if test "x$TOOLCHAIN_TYPE" = xclang && test "x$OPENJDK_TARGET_OS" = xaix; then
# clang compiler on aix needs -ffunction-sections
TOOLCHAIN_CFLAGS_JVM="$TOOLCHAIN_CFLAGS_JVM -ffunction-sections -ftls-model -fno-math-errno -fstack-protector"
TOOLCHAIN_CFLAGS_JDK="-ffunction-sections -fsigned-char -fstack-protector"
fi
if test "x$TOOLCHAIN_TYPE" = xgcc; then
TOOLCHAIN_CFLAGS_JVM="$TOOLCHAIN_CFLAGS_JVM -fstack-protector"
TOOLCHAIN_CFLAGS_JVM="$TOOLCHAIN_CFLAGS_JVM -fcheck-new -fstack-protector"
TOOLCHAIN_CFLAGS_JDK="-pipe -fstack-protector"
# reduce lib size on linux in link step, this needs also special compile flags
# do this on s390x also for libjvm (where serviceability agent is not supported)
@@ -622,7 +527,7 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
# Suggested additions: -qsrcmsg to get improved error reporting
# set -qtbtable=full for a better traceback table/better stacks in hs_err when xlc16 is used
TOOLCHAIN_CFLAGS_JDK="-qtbtable=full -qchars=signed -qfullpath -qsaveopt -qstackprotect" # add on both CFLAGS
TOOLCHAIN_CFLAGS_JVM="-qtbtable=full -qtune=balanced -fno-exceptions \
TOOLCHAIN_CFLAGS_JVM="-qtbtable=full -qtune=balanced \
-qalias=noansi -qstrict -qtls=default -qnortti -qnoeh -qignerrno -qstackprotect"
elif test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
TOOLCHAIN_CFLAGS_JVM="-nologo -MD -Zc:preprocessor -Zc:strictStrings -MP"
@@ -688,9 +593,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
if test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang; then
PICFLAG="-fPIC"
PIEFLAG="-fPIE"
elif test "x$TOOLCHAIN_TYPE" = xclang && test "x$OPENJDK_TARGET_OS" = xaix; then
JVM_PICFLAG="-fpic -mcmodel=large -Wl,-bbigtoc"
JDK_PICFLAG="-fpic"
elif test "x$TOOLCHAIN_TYPE" = xxlc; then
# '-qpic' defaults to 'qpic=small'. This means that the compiler generates only
# one instruction for accessing the TOC. If the TOC grows larger than 64K, the linker
@@ -727,7 +629,7 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
STATIC_LIBS_CFLAGS="-DSTATIC_BUILD=1"
if test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang; then
STATIC_LIBS_CFLAGS="$STATIC_LIBS_CFLAGS -ffunction-sections -fdata-sections \
-DJNIEXPORT='__attribute__((visibility(\"default\")))'"
-DJNIEXPORT='__attribute__((visibility(\"hidden\")))'"
else
STATIC_LIBS_CFLAGS="$STATIC_LIBS_CFLAGS -DJNIEXPORT="
fi
@@ -835,14 +737,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
# for all archs except arm and ppc, prevent gcc to omit frame pointer
$1_CFLAGS_CPU_JDK="${$1_CFLAGS_CPU_JDK} -fno-omit-frame-pointer"
fi
if test "x$FLAGS_CPU" = xppc64le; then
# Little endian machine uses ELFv2 ABI.
# Use Power8, this is the first CPU to support PPC64 LE with ELFv2 ABI.
$1_CFLAGS_CPU_JVM="${$1_CFLAGS_CPU_JVM} -DABI_ELFv2 -mcpu=power8 -mtune=power8"
fi
fi
if test "x$OPENJDK_TARGET_OS" = xaix; then
$1_CFLAGS_CPU="-mcpu=pwr8"
fi
elif test "x$TOOLCHAIN_TYPE" = xxlc; then
@@ -867,6 +761,15 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
$1_TOOLCHAIN_CFLAGS="${$1_GCC6_CFLAGS}"
$1_WARNING_CFLAGS_JVM="-Wno-format-zero-length -Wtype-limits -Wuninitialized"
elif test "x$TOOLCHAIN_TYPE" = xclang; then
NO_DELETE_NULL_POINTER_CHECKS_CFLAG="-fno-delete-null-pointer-checks"
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [$NO_DELETE_NULL_POINTER_CHECKS_CFLAG],
PREFIX: $3,
IF_FALSE: [
NO_DELETE_NULL_POINTER_CHECKS_CFLAG=
]
)
$1_TOOLCHAIN_CFLAGS="${NO_DELETE_NULL_POINTER_CHECKS_CFLAG}"
fi
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
@@ -877,7 +780,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
REPRODUCIBLE_CFLAGS=
]
)
AC_SUBST(REPRODUCIBLE_CFLAGS)
fi
# Prevent the __FILE__ macro from generating absolute paths into the built
@@ -911,22 +813,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
FILE_MACRO_CFLAGS=
]
)
if test "x$FILE_MACRO_CFLAGS" != x; then
# Add -pathmap for all VS system include paths using Windows
# full Long path name that is generated by the compiler
# Not enabled under WSL as there is no easy way to obtain the
# Windows full long paths, thus reproducible WSL builds will
# depend on building with the same VS toolchain install location.
if test "x$OPENJDK_BUILD_OS_ENV" != "xwindows.wsl1" && test "x$OPENJDK_BUILD_OS_ENV" != "xwindows.wsl2"; then
for ipath in ${$3SYSROOT_CFLAGS}; do
if test "x${ipath:0:2}" == "x-I"; then
ipath_path=${ipath#"-I"}
UTIL_FIXUP_WIN_LONG_PATH(ipath_path)
FILE_MACRO_CFLAGS="$FILE_MACRO_CFLAGS -pathmap:\"$ipath_path\"=vsi"
fi
done
fi
fi
fi
AC_MSG_CHECKING([how to prevent absolute paths in output])
@@ -1004,12 +890,17 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
# $2 - Prefix for compiler variables (either BUILD_ or nothing).
AC_DEFUN([FLAGS_SETUP_GCC6_COMPILER_FLAGS],
[
# This flag is required for GCC 6 builds as undefined behavior in OpenJDK code
# runs afoul of the more aggressive versions of this optimization.
# These flags are required for GCC 6 builds as undefined behavior in OpenJDK code
# runs afoul of the more aggressive versions of these optimizations.
# Notably, value range propagation now assumes that the this pointer of C++
# member functions is non-null.
NO_DELETE_NULL_POINTER_CHECKS_CFLAG="-fno-delete-null-pointer-checks"
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [$NO_DELETE_NULL_POINTER_CHECKS_CFLAG],
PREFIX: $2, IF_FALSE: [NO_DELETE_NULL_POINTER_CHECKS_CFLAG=""])
NO_LIFETIME_DSE_CFLAG="-fno-lifetime-dse"
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [$NO_LIFETIME_DSE_CFLAG],
PREFIX: $2, IF_FALSE: [NO_LIFETIME_DSE_CFLAG=""])
$1_GCC6_CFLAGS="${NO_LIFETIME_DSE_CFLAG}"
$1_GCC6_CFLAGS="${NO_DELETE_NULL_POINTER_CHECKS_CFLAG} ${NO_LIFETIME_DSE_CFLAG}"
])
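Each flag above is only kept if the compiler accepts it; FLAGS_COMPILER_CHECK_ARGUMENTS performs a test compile and clears the variable on failure. A stripped-down sketch of that kind of probe, assuming $CC is set (file names are illustrative):

# Probe whether the compiler accepts -fno-delete-null-pointer-checks.
echo 'int main(void) { return 0; }' > conftest.c
if $CC -Werror -fno-delete-null-pointer-checks -c conftest.c -o conftest.o 2>/dev/null; then
  echo "flag accepted"
else
  echo "flag rejected, leaving it out"
fi
rm -f conftest.c conftest.o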
AC_DEFUN_ONCE([FLAGS_SETUP_BRANCH_PROTECTION],

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -70,16 +70,12 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_HELPER],
fi
fi
BASIC_LDFLAGS_JVM_ONLY=""
BASIC_LDFLAGS_JVM_ONLY="-Wl,-O1"
elif test "x$TOOLCHAIN_TYPE" = xclang; then
BASIC_LDFLAGS_JVM_ONLY="-mno-omit-leaf-frame-pointer -mstack-alignment=16 \
-fPIC"
if test "x$OPENJDK_TARGET_OS" = xaix; then
BASIC_LDFLAGS="-Wl,-b64 -Wl,-brtl -Wl,-bnorwexec -Wl,-bnolibpath -Wl,-bnoexpall \
-Wl,-bernotok -Wl,-bdatapsize:64k -Wl,-btextpsize:64k -Wl,-bstackpsize:64k"
BASIC_LDFLAGS_JVM_ONLY="$BASIC_LDFLAGS_JVM_ONLY -Wl,-lC_r -Wl,-bbigtoc"
fi
elif test "x$TOOLCHAIN_TYPE" = xxlc; then
BASIC_LDFLAGS="-b64 -brtl -bnorwexec -bnolibpath -bnoexpall -bernotok -btextpsize:64K \
-bdatapsize:64K -bstackpsize:64K"
@@ -92,8 +88,7 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_HELPER],
BASIC_LDFLAGS_JVM_ONLY="-opt:icf,8 -subsystem:windows"
fi
if (test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang) \
&& test "x$OPENJDK_TARGET_OS" != xaix; then
if test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang; then
if test -n "$HAS_NOEXECSTACK"; then
BASIC_LDFLAGS="$BASIC_LDFLAGS -Wl,-z,noexecstack"
fi
@@ -121,14 +116,6 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_HELPER],
if test "x$DEBUG_LEVEL" != xrelease; then
DEBUGLEVEL_LDFLAGS_JVM_ONLY="$DEBUGLEVEL_LDFLAGS_JVM_ONLY -bbigtoc"
fi
elif test "x$TOOLCHAIN_TYPE" = xclang && test "x$OPENJDK_TARGET_OS" = xaix; then
# We need '-fpic' or '-fpic -mcmodel=large -Wl,-bbigtoc' if the TOC overflows.
# Hotspot now overflows its 64K TOC (currently only for debug),
# so we build with '-fpic -mcmodel=large -Wl,-bbigtoc'.
if test "x$DEBUG_LEVEL" != xrelease; then
DEBUGLEVEL_LDFLAGS_JVM_ONLY="$DEBUGLEVEL_LDFLAGS_JVM_ONLY -Wl,-bbigtoc"
fi
fi
# Setup LDFLAGS for linking executables

View File

@@ -46,31 +46,15 @@ AC_DEFUN([FLAGS_SETUP_ARFLAGS],
AC_DEFUN([FLAGS_SETUP_STRIPFLAGS],
[
## Setup strip.
if test "x$STRIP" != x; then
AC_MSG_CHECKING([how to run strip])
# Easy cheat: Check strip variant by passing --version as an argument.
# Different types of strip have varying command line syntaxes for querying their
# version string, and all noisily fail if the provided version option is not
# recognised.
#
# The actual version string or failure to execute strip are hidden by redirection
# to config.log with 2>&AS_MESSAGE_LOG_FD >&AS_MESSAGE_LOG_FD
if $STRIP "--version" 2>&AS_MESSAGE_LOG_FD >&AS_MESSAGE_LOG_FD; then
# strip that comes from the GNU family uses --version
# This variant of strip is usually found accompanying gcc and clang
STRIPFLAGS="--strip-debug"
elif $STRIP "-V" 2>&AS_MESSAGE_LOG_FD >&AS_MESSAGE_LOG_FD; then
# IBM strip that works with AIX binaries only supports -V
STRIPFLAGS="-X32_64"
else
# The only strip variant left is MacOS/Xcode strip, which does not have any
# way whatsoever to be identified (lacking even basic help or version options),
# so we leave it as the last fallback when all other tests have failed.
# FIXME: should this really be per platform, or should it be per toolchain type?
# strip is not provided by clang; so guessing platform makes most sense.
# FIXME: we should really only export STRIPFLAGS from here, not POST_STRIP_CMD.
if test "x$OPENJDK_TARGET_OS" = xlinux; then
STRIPFLAGS="-g"
elif test "x$OPENJDK_TARGET_OS" = xmacosx; then
STRIPFLAGS="-S"
fi
AC_MSG_RESULT($STRIPFLAGS)
elif test "x$OPENJDK_TARGET_OS" = xaix; then
STRIPFLAGS="-X32_64"
fi
AC_SUBST(STRIPFLAGS)
@@ -88,16 +72,6 @@ AC_DEFUN([FLAGS_SETUP_RCFLAGS],
AC_SUBST(RCFLAGS)
])
AC_DEFUN([FLAGS_SETUP_NMFLAGS],
[
# On AIX, we need to set NM flag -X64 for processing 64bit object files
if test "x$OPENJDK_TARGET_OS" = xaix; then
NMFLAGS="-X64"
fi
AC_SUBST(NMFLAGS)
])
################################################################################
# platform independent
AC_DEFUN([FLAGS_SETUP_ASFLAGS],
@@ -143,3 +117,4 @@ AC_DEFUN([FLAGS_SETUP_ASFLAGS_CPU_DEP],
AC_SUBST($2JVM_ASFLAGS)
])

View File

@@ -342,7 +342,6 @@ AC_DEFUN([FLAGS_SETUP_TOOLCHAIN_CONTROL],
AC_MSG_CHECKING([if @file is supported by gcc])
# Extra empty "" to prevent ECHO from interpreting '--version' as argument
$ECHO "" "--version" > command.file
# Redirect stderr and stdout to config.log (AS_MESSAGE_LOG_FD) via merge
if $CXX @command.file 2>&AS_MESSAGE_LOG_FD >&AS_MESSAGE_LOG_FD; then
AC_MSG_RESULT(yes)
COMPILER_COMMAND_FILE_FLAG="@"
@@ -428,7 +427,6 @@ AC_DEFUN([FLAGS_SETUP_FLAGS],
FLAGS_SETUP_ARFLAGS
FLAGS_SETUP_STRIPFLAGS
FLAGS_SETUP_RCFLAGS
FLAGS_SETUP_NMFLAGS
FLAGS_SETUP_ASFLAGS
FLAGS_SETUP_ASFLAGS_CPU_DEP([TARGET])
@@ -504,14 +502,14 @@ UTIL_DEFUN_NAMED([FLAGS_CXX_COMPILER_CHECK_ARGUMENTS],
UTIL_DEFUN_NAMED([FLAGS_COMPILER_CHECK_ARGUMENTS],
[*ARGUMENT IF_TRUE IF_FALSE PREFIX], [$@],
[
FLAGS_C_COMPILER_CHECK_ARGUMENTS(ARGUMENT: ARG_ARGUMENT,
FLAGS_C_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [ARG_ARGUMENT],
IF_TRUE: [C_COMP_SUPPORTS="yes"],
IF_FALSE: [C_COMP_SUPPORTS="no"],
PREFIX: ARG_PREFIX)
FLAGS_CXX_COMPILER_CHECK_ARGUMENTS(ARGUMENT: ARG_ARGUMENT,
PREFIX: [ARG_PREFIX])
FLAGS_CXX_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [ARG_ARGUMENT],
IF_TRUE: [CXX_COMP_SUPPORTS="yes"],
IF_FALSE: [CXX_COMP_SUPPORTS="no"],
PREFIX: ARG_PREFIX)
PREFIX: [ARG_PREFIX])
AC_MSG_CHECKING([if both ARG_PREFIX[CC] and ARG_PREFIX[CXX] support "ARG_ARGUMENT"])
supports=no

View File

@@ -278,11 +278,6 @@ AC_DEFUN_ONCE([HELP_PRINT_SUMMARY_AND_WARNINGS],
printf "using default settings.\n"
fi
if test "x$REAL_CONFIGURE_COMMAND_EXEC_FULL" != x; then
printf "\n"
printf "The original configure invocation was '$REAL_CONFIGURE_COMMAND_EXEC_SHORT $REAL_CONFIGURE_COMMAND_LINE'.\n"
fi
printf "\n"
printf "Configuration summary:\n"
printf "* Name: $CONF_NAME\n"

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -190,16 +190,6 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_JDK_OPTIONS],
fi
AC_SUBST(INCLUDE_SA)
# Setup default CDS alignment. On platforms where one build may run on machines with different
# page sizes, the JVM choses a compatible alignment to fit all possible page sizes. This slightly
# increases archive size.
# The only platform having this problem at the moment is Linux on aarch64, which may encounter
# three different page sizes: 4K, 64K, and if run on Mac m1 hardware, 16K.
COMPATIBLE_CDS_ALIGNMENT_DEFAULT=false
if test "x$OPENJDK_TARGET_OS" = "xlinux" && test "x$OPENJDK_TARGET_CPU" = "xaarch64"; then
COMPATIBLE_CDS_ALIGNMENT_DEFAULT=auto
fi
# Compress jars
COMPRESS_JARS=false
@@ -420,13 +410,12 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_CODE_COVERAGE],
#
AC_DEFUN_ONCE([JDKOPT_SETUP_ADDRESS_SANITIZER],
[
UTIL_ARG_ENABLE(NAME: asan, DEFAULT: false, RESULT: ASAN_ENABLED,
UTIL_ARG_ENABLE(NAME: asan, DEFAULT: false,
DESC: [enable AddressSanitizer],
CHECK_AVAILABLE: [
AC_MSG_CHECKING([if AddressSanitizer (asan) is available])
if test "x$TOOLCHAIN_TYPE" = "xgcc" ||
test "x$TOOLCHAIN_TYPE" = "xclang" ||
test "x$TOOLCHAIN_TYPE" = "xmicrosoft"; then
test "x$TOOLCHAIN_TYPE" = "xclang"; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no])
@@ -434,20 +423,11 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_ADDRESS_SANITIZER],
fi
],
IF_ENABLED: [
if test "x$TOOLCHAIN_TYPE" = "xgcc" ||
test "x$TOOLCHAIN_TYPE" = "xclang"; then
# ASan is simply incompatible with gcc -Wstringop-truncation. See
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85650
# It's harmless to be suppressed in clang as well.
ASAN_CFLAGS="-fsanitize=address -Wno-stringop-truncation -fno-omit-frame-pointer -fno-common -DADDRESS_SANITIZER"
ASAN_CFLAGS="-fsanitize=address -Wno-stringop-truncation -fno-omit-frame-pointer"
ASAN_LDFLAGS="-fsanitize=address"
elif test "x$TOOLCHAIN_TYPE" = "xmicrosoft"; then
# -Oy- is equivalent to -fno-omit-frame-pointer in GCC/Clang.
ASAN_CFLAGS="-fsanitize=address -Oy- -DADDRESS_SANITIZER"
# MSVC produces a warning if you pass -fsanitize=address to the linker. It also complains
# if -DEBUG is not passed to the linker when building with ASan.
ASAN_LDFLAGS="-debug"
fi
JVM_CFLAGS="$JVM_CFLAGS $ASAN_CFLAGS"
JVM_LDFLAGS="$JVM_LDFLAGS $ASAN_LDFLAGS"
CFLAGS_JDKLIB="$CFLAGS_JDKLIB $ASAN_CFLAGS"
@@ -456,91 +436,15 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_ADDRESS_SANITIZER],
CXXFLAGS_JDKEXE="$CXXFLAGS_JDKEXE $ASAN_CFLAGS"
LDFLAGS_JDKLIB="$LDFLAGS_JDKLIB $ASAN_LDFLAGS"
LDFLAGS_JDKEXE="$LDFLAGS_JDKEXE $ASAN_LDFLAGS"
ASAN_ENABLED="yes"
],
IF_DISABLED: [
ASAN_ENABLED="no"
])
AC_SUBST(ASAN_ENABLED)
])
###############################################################################
#
# LeakSanitizer
#
AC_DEFUN_ONCE([JDKOPT_SETUP_LEAK_SANITIZER],
[
UTIL_ARG_ENABLE(NAME: lsan, DEFAULT: false, RESULT: LSAN_ENABLED,
DESC: [enable LeakSanitizer],
CHECK_AVAILABLE: [
AC_MSG_CHECKING([if LeakSanitizer (lsan) is available])
if test "x$TOOLCHAIN_TYPE" = "xgcc" ||
test "x$TOOLCHAIN_TYPE" = "xclang"; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no])
AVAILABLE=false
fi
],
IF_ENABLED: [
LSAN_CFLAGS="-fsanitize=leak -fno-omit-frame-pointer -DLEAK_SANITIZER"
LSAN_LDFLAGS="-fsanitize=leak"
JVM_CFLAGS="$JVM_CFLAGS $LSAN_CFLAGS"
JVM_LDFLAGS="$JVM_LDFLAGS $LSAN_LDFLAGS"
CFLAGS_JDKLIB="$CFLAGS_JDKLIB $LSAN_CFLAGS"
CFLAGS_JDKEXE="$CFLAGS_JDKEXE $LSAN_CFLAGS"
CXXFLAGS_JDKLIB="$CXXFLAGS_JDKLIB $LSAN_CFLAGS"
CXXFLAGS_JDKEXE="$CXXFLAGS_JDKEXE $LSAN_CFLAGS"
LDFLAGS_JDKLIB="$LDFLAGS_JDKLIB $LSAN_LDFLAGS"
LDFLAGS_JDKEXE="$LDFLAGS_JDKEXE $LSAN_LDFLAGS"
])
AC_SUBST(LSAN_ENABLED)
])
###############################################################################
#
# UndefinedBehaviorSanitizer
#
AC_DEFUN_ONCE([JDKOPT_SETUP_UNDEFINED_BEHAVIOR_SANITIZER],
[
UTIL_ARG_WITH(NAME: additional-ubsan-checks, TYPE: string,
DEFAULT: [],
DESC: [Customizes the ubsan checks],
OPTIONAL: true)
# GCC reports lots of likely false positives for stringop-truncation and format-overflow.
# Silence them for now.
UBSAN_CHECKS="-fsanitize=undefined -fsanitize=float-divide-by-zero -fno-sanitize=shift-base -fno-sanitize=alignment \
$ADDITIONAL_UBSAN_CHECKS"
UBSAN_CFLAGS="$UBSAN_CHECKS -Wno-stringop-truncation -Wno-format-overflow -fno-omit-frame-pointer -DUNDEFINED_BEHAVIOR_SANITIZER"
UBSAN_LDFLAGS="$UBSAN_CHECKS"
UTIL_ARG_ENABLE(NAME: ubsan, DEFAULT: false, RESULT: UBSAN_ENABLED,
DESC: [enable UndefinedBehaviorSanitizer],
CHECK_AVAILABLE: [
AC_MSG_CHECKING([if UndefinedBehaviorSanitizer (ubsan) is available])
if test "x$TOOLCHAIN_TYPE" = "xgcc" ||
test "x$TOOLCHAIN_TYPE" = "xclang"; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no])
AVAILABLE=false
fi
],
IF_ENABLED: [
JVM_CFLAGS="$JVM_CFLAGS $UBSAN_CFLAGS"
JVM_LDFLAGS="$JVM_LDFLAGS $UBSAN_LDFLAGS"
CFLAGS_JDKLIB="$CFLAGS_JDKLIB $UBSAN_CFLAGS"
CFLAGS_JDKEXE="$CFLAGS_JDKEXE $UBSAN_CFLAGS"
CXXFLAGS_JDKLIB="$CXXFLAGS_JDKLIB $UBSAN_CFLAGS"
CXXFLAGS_JDKEXE="$CXXFLAGS_JDKEXE $UBSAN_CFLAGS"
LDFLAGS_JDKLIB="$LDFLAGS_JDKLIB $UBSAN_LDFLAGS"
LDFLAGS_JDKEXE="$LDFLAGS_JDKEXE $UBSAN_LDFLAGS"
])
if test "x$UBSAN_ENABLED" = xfalse; then
UBSAN_CFLAGS=""
UBSAN_LDFLAGS=""
fi
AC_SUBST(UBSAN_CFLAGS)
AC_SUBST(UBSAN_LDFLAGS)
AC_SUBST(UBSAN_ENABLED)
])
################################################################################
#
# Static build support. When enabled will generate static
@@ -689,10 +593,10 @@ AC_DEFUN([JDKOPT_ENABLE_DISABLE_CDS_ARCHIVE],
#
AC_DEFUN([JDKOPT_ENABLE_DISABLE_COMPATIBLE_CDS_ALIGNMENT],
[
UTIL_ARG_ENABLE(NAME: compatible-cds-alignment, DEFAULT: $COMPATIBLE_CDS_ALIGNMENT_DEFAULT,
UTIL_ARG_ENABLE(NAME: compatible-cds-alignment, DEFAULT: false,
RESULT: ENABLE_COMPATIBLE_CDS_ALIGNMENT,
DESC: [enable use alternative compatible cds core region alignment],
DEFAULT_DESC: [disabled except on linux-aarch64],
DEFAULT_DESC: [disabled],
CHECKING_MSG: [if compatible cds region alignment enabled],
CHECK_AVAILABLE: [
AC_MSG_CHECKING([if CDS archive is available])
@@ -838,10 +742,7 @@ AC_DEFUN([JDKOPT_CHECK_CODESIGN_PARAMS],
$RM "$CODESIGN_TESTFILE"
$TOUCH "$CODESIGN_TESTFILE"
CODESIGN_SUCCESS=false
$ECHO "check codesign, calling $CODESIGN $PARAMS $CODESIGN_TESTFILE" >&AS_MESSAGE_LOG_FD
eval \"$CODESIGN\" $PARAMS \"$CODESIGN_TESTFILE\" 2>&AS_MESSAGE_LOG_FD \
$CODESIGN $PARAMS "$CODESIGN_TESTFILE" 2>&AS_MESSAGE_LOG_FD \
>&AS_MESSAGE_LOG_FD && CODESIGN_SUCCESS=true
$RM "$CODESIGN_TESTFILE"
AC_MSG_CHECKING([$MESSAGE])
@@ -854,7 +755,7 @@ AC_DEFUN([JDKOPT_CHECK_CODESIGN_PARAMS],
AC_DEFUN([JDKOPT_CHECK_CODESIGN_HARDENED],
[
JDKOPT_CHECK_CODESIGN_PARAMS([-s \"$MACOSX_CODESIGN_IDENTITY\" --option runtime],
JDKOPT_CHECK_CODESIGN_PARAMS([-s "$MACOSX_CODESIGN_IDENTITY" --option runtime],
[if codesign with hardened runtime is possible])
])
@@ -880,7 +781,7 @@ AC_DEFUN([JDKOPT_SETUP_MACOSX_SIGNING],
# Check for user provided code signing identity.
UTIL_ARG_WITH(NAME: macosx-codesign-identity, TYPE: string,
DEFAULT: openjdk_codesign, CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY],
DEFAULT: openjdk_codesign, CHECK_VALUE: UTIL_CHECK_STRING_NON_EMPTY,
DESC: [specify the macosx code signing identity],
CHECKING_MSG: [for macosx code signing identity]
)
@@ -922,22 +823,3 @@ AC_DEFUN([JDKOPT_SETUP_MACOSX_SIGNING],
AC_SUBST(MACOSX_CODESIGN_MODE)
fi
])
################################################################################
#
# fallback linker
#
AC_DEFUN_ONCE([JDKOPT_SETUP_FALLBACK_LINKER],
[
FALLBACK_LINKER_DEFAULT=false
if HOTSPOT_CHECK_JVM_VARIANT(zero); then
FALLBACK_LINKER_DEFAULT=true
fi
UTIL_ARG_ENABLE(NAME: fallback-linker, DEFAULT: $FALLBACK_LINKER_DEFAULT,
RESULT: ENABLE_FALLBACK_LINKER,
DESC: [enable libffi-based fallback implementation of java.lang.foreign.Linker],
CHECKING_MSG: [if fallback linker enabled])
AC_SUBST(ENABLE_FALLBACK_LINKER)
])

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2015, 2021, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -69,100 +69,104 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
AC_SUBST(JDK_RC_PLATFORM_NAME)
AC_SUBST(HOTSPOT_VM_DISTRO)
# Note: UTIL_ARG_WITH treats empty strings as valid values when OPTIONAL is false!
# Setup username (for use in adhoc version strings etc)
AC_ARG_WITH([build-user], [AS_HELP_STRING([--with-build-user],
[build username to use in version strings])])
if test "x$with_build_user" = xyes || test "x$with_build_user" = xno; then
AC_MSG_ERROR([--with-build-user must have a value])
elif test "x$with_build_user" != x; then
USERNAME="$with_build_user"
else
# Outer [ ] to quote m4.
[ USERNAME=`$ECHO "$USER" | $TR -d -c '[a-z][A-Z][0-9]'` ]
# $USER may be not defined in dockers, so try to check with $WHOAMI
if test "x$USERNAME" = x && test "x$WHOAMI" != x; then
[ USERNAME=`$WHOAMI | $TR -d -c '[a-z][A-Z][0-9]'` ]
fi
# Setup username (for use in adhoc version strings etc)
UTIL_ARG_WITH(NAME: build-user, TYPE: string,
RESULT: USERNAME,
DEFAULT: $USERNAME,
DESC: [build username to use in version strings],
DEFAULT_DESC: [current username, sanitized],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY])
AC_SUBST(USERNAME)
# Set the JDK RC name
AC_ARG_WITH(jdk-rc-name, [AS_HELP_STRING([--with-jdk-rc-name],
[Set JDK RC name. This is used for FileDescription and ProductName properties
of MS Windows binaries. @<:@not specified@:>@])])
if test "x$with_jdk_rc_name" = xyes || test "x$with_jdk_rc_name" = xno; then
AC_MSG_ERROR([--with-jdk-rc-name must have a value])
elif [ ! [[ $with_jdk_rc_name =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-jdk-rc-name contains non-printing characters: $with_jdk_rc_name])
elif test "x$with_jdk_rc_name" != x; then
# Set JDK_RC_NAME to a custom value if '--with-jdk-rc-name' was used and is not empty.
JDK_RC_NAME="$with_jdk_rc_name"
else
# Otherwise calculate from "branding.conf" included above.
UTIL_ARG_WITH(NAME: jdk-rc-name, TYPE: string,
DEFAULT: $PRODUCT_NAME $JDK_RC_PLATFORM_NAME,
DESC: [Set JDK RC name. This is used for FileDescription and ProductName
properties of MS Windows binaries.],
DEFAULT_DESC: [from branding.conf],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
JDK_RC_NAME="$PRODUCT_NAME $JDK_RC_PLATFORM_NAME"
fi
AC_SUBST(JDK_RC_NAME)
# The vendor name, if any
AC_ARG_WITH(vendor-name, [AS_HELP_STRING([--with-vendor-name],
[Set vendor name. Among others, used to set the 'java.vendor'
and 'java.vm.vendor' system properties. @<:@not specified@:>@])])
if test "x$with_vendor_name" = xyes || test "x$with_vendor_name" = xno; then
AC_MSG_ERROR([--with-vendor-name must have a value])
elif [ ! [[ $with_vendor_name =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-vendor-name contains non-printing characters: $with_vendor_name])
elif test "x$with_vendor_name" != x; then
# Only set COMPANY_NAME if '--with-vendor-name' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
UTIL_ARG_WITH(NAME: vendor-name, TYPE: string,
RESULT: COMPANY_NAME,
DEFAULT: $COMPANY_NAME,
DESC: [Set vendor name. Among others, used to set the 'java.vendor'
and 'java.vm.vendor' system properties.],
DEFAULT_DESC: [from branding.conf],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
COMPANY_NAME="$with_vendor_name"
fi
AC_SUBST(COMPANY_NAME)
# Set the JDK RC Company name
# Otherwise uses the value set for "vendor-name".
UTIL_ARG_WITH(NAME: jdk-rc-company-name, TYPE: string,
DEFAULT: $COMPANY_NAME,
DESC: [Set JDK RC company name. This is used for CompanyName properties of MS Windows binaries.],
DEFAULT_DESC: [from branding.conf],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
AC_SUBST(JDK_RC_COMPANY_NAME)
# The vendor URL, if any
AC_ARG_WITH(vendor-url, [AS_HELP_STRING([--with-vendor-url],
[Set the 'java.vendor.url' system property @<:@not specified@:>@])])
if test "x$with_vendor_url" = xyes || test "x$with_vendor_url" = xno; then
AC_MSG_ERROR([--with-vendor-url must have a value])
elif [ ! [[ $with_vendor_url =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-vendor-url contains non-printing characters: $with_vendor_url])
elif test "x$with_vendor_url" != x; then
# Only set VENDOR_URL if '--with-vendor-url' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
UTIL_ARG_WITH(NAME: vendor-url, TYPE: string,
DEFAULT: $VENDOR_URL,
DESC: [Set the 'java.vendor.url' system property],
DEFAULT_DESC: [from branding.conf],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
VENDOR_URL="$with_vendor_url"
fi
AC_SUBST(VENDOR_URL)
# The vendor bug URL, if any
AC_ARG_WITH(vendor-bug-url, [AS_HELP_STRING([--with-vendor-bug-url],
[Set the 'java.vendor.url.bug' system property @<:@not specified@:>@])])
if test "x$with_vendor_bug_url" = xyes || test "x$with_vendor_bug_url" = xno; then
AC_MSG_ERROR([--with-vendor-bug-url must have a value])
elif [ ! [[ $with_vendor_bug_url =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-vendor-bug-url contains non-printing characters: $with_vendor_bug_url])
elif test "x$with_vendor_bug_url" != x; then
# Only set VENDOR_URL_BUG if '--with-vendor-bug-url' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
UTIL_ARG_WITH(NAME: vendor-bug-url, TYPE: string,
RESULT: VENDOR_URL_BUG,
DEFAULT: $VENDOR_URL_BUG,
DESC: [Set the 'java.vendor.url.bug' system property],
DEFAULT_DESC: [from branding.conf],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
VENDOR_URL_BUG="$with_vendor_bug_url"
fi
AC_SUBST(VENDOR_URL_BUG)
# The vendor VM bug URL, if any
AC_ARG_WITH(vendor-vm-bug-url, [AS_HELP_STRING([--with-vendor-vm-bug-url],
[Sets the bug URL which will be displayed when the VM crashes @<:@not specified@:>@])])
if test "x$with_vendor_vm_bug_url" = xyes || test "x$with_vendor_vm_bug_url" = xno; then
AC_MSG_ERROR([--with-vendor-vm-bug-url must have a value])
elif [ ! [[ $with_vendor_vm_bug_url =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-vendor-vm-bug-url contains non-printing characters: $with_vendor_vm_bug_url])
elif test "x$with_vendor_vm_bug_url" != x; then
# Only set VENDOR_URL_VM_BUG if '--with-vendor-vm-bug-url' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
UTIL_ARG_WITH(NAME: vendor-vm-bug-url, TYPE: string,
RESULT: VENDOR_URL_VM_BUG,
DEFAULT: $VENDOR_URL_VM_BUG,
DESC: [Sets the bug URL which will be displayed when the VM crashes],
DEFAULT_DESC: [from branding.conf],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
VENDOR_URL_VM_BUG="$with_vendor_vm_bug_url"
fi
AC_SUBST(VENDOR_URL_VM_BUG)
# Override version from arguments
# If --with-version-string is set, process it first. It is possible to
# override parts with more specific flags, since these are processed later.
UTIL_ARG_WITH(NAME: version-string, TYPE: string,
DEFAULT: [],
DESC: [Set version string],
DEFAULT_DESC: [calculated],
CHECK_VALUE: [
if test "x$RESULT" != x; then
AC_ARG_WITH(version-string, [AS_HELP_STRING([--with-version-string],
[Set version string @<:@calculated@:>@])])
if test "x$with_version_string" = xyes || test "x$with_version_string" = xno; then
AC_MSG_ERROR([--with-version-string must have a value])
elif test "x$with_version_string" != x; then
# Additional [] needed to keep m4 from mangling shell constructs.
if [ [[ $RESULT =~ ^([0-9]+)(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(-([a-zA-Z0-9]+))?(((\+)([0-9]*))?(-([-a-zA-Z0-9.]+))?)?$ ]] ]; then
if [ [[ $with_version_string =~ ^([0-9]+)(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(\.([0-9]+))?(-([a-zA-Z0-9]+))?(((\+)([0-9]*))?(-([-a-zA-Z0-9.]+))?)?$ ]] ]; then
VERSION_FEATURE=${BASH_REMATCH[[1]]}
VERSION_INTERIM=${BASH_REMATCH[[3]]}
VERSION_UPDATE=${BASH_REMATCH[[5]]}
@@ -205,10 +209,9 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
# We still allow them to explicitly override though.
NO_DEFAULT_VERSION_PARTS=true
else
FAILURE="--with-version-string fails to parse as a valid version string: $RESULT"
AC_MSG_ERROR([--with-version-string fails to parse as a valid version string: $with_version_string])
fi
fi
])
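The CHECK_VALUE block above parses --with-version-string with a bash regular expression and spreads the captures into the individual VERSION_* variables via BASH_REMATCH. A simplified sketch of the same idea (the pattern is a reduced form of the one used above; the sample string is illustrative):

v="21.0.1+12-LTS"
if [[ $v =~ ^([0-9]+)(\.([0-9]+))?(\.([0-9]+))?(-([a-zA-Z0-9]+))?([+]([0-9]*))?(-([-a-zA-Z0-9.]+))?$ ]]; then
  echo "feature=${BASH_REMATCH[1]} interim=${BASH_REMATCH[3]} update=${BASH_REMATCH[5]} build=${BASH_REMATCH[9]} opt=${BASH_REMATCH[11]}"
fi
# -> feature=21 interim=0 update=1 build=12 opt=LTS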
AC_ARG_WITH(version-pre, [AS_HELP_STRING([--with-version-pre],
[Set the base part of the version 'PRE' field (pre-release identifier) @<:@'internal'@:>@])],
@@ -287,18 +290,22 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
fi
fi
# Default is to get value from version-numbers.conf
if test "x$NO_DEFAULT_VERSION_PARTS" = xtrue; then
DEFAULT_VERSION_FEATURE="$VERSION_FEATURE"
fi
AC_ARG_WITH(version-feature, [AS_HELP_STRING([--with-version-feature],
[Set version 'FEATURE' field (first number) @<:@current source value@:>@])],
[with_version_feature_present=true], [with_version_feature_present=false])
UTIL_ARG_WITH(NAME: version-feature, TYPE: string,
DEFAULT: $DEFAULT_VERSION_FEATURE,
DESC: [Set version 'FEATURE' field (first number)],
DEFAULT_DESC: [current source value],
CHECK_VALUE: [
JDKVER_CHECK_AND_SET_NUMBER(VERSION_FEATURE, $RESULT)
])
if test "x$with_version_feature_present" = xtrue; then
if test "x$with_version_feature" = xyes || test "x$with_version_feature" = xno; then
AC_MSG_ERROR([--with-version-feature must have a value])
else
JDKVER_CHECK_AND_SET_NUMBER(VERSION_FEATURE, $with_version_feature)
fi
else
if test "x$NO_DEFAULT_VERSION_PARTS" != xtrue; then
# Default is to get value from version-numbers.conf
VERSION_FEATURE="$DEFAULT_VERSION_FEATURE"
fi
fi
AC_ARG_WITH(version-interim, [AS_HELP_STRING([--with-version-interim],
[Set version 'INTERIM' field (second number) @<:@current source value@:>@])],
@@ -473,56 +480,80 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
VERSION_SHORT=$VERSION_NUMBER${VERSION_PRE:+-$VERSION_PRE}
# The version date
UTIL_ARG_WITH(NAME: version-date, TYPE: string,
DEFAULT: $DEFAULT_VERSION_DATE,
DESC: [Set version date],
DEFAULT_DESC: [current source value],
CHECK_VALUE: [
if test "x$RESULT" = x; then
FAILURE="--with-version-date cannot be empty"
elif [ ! [[ $RESULT =~ ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ ]] ]; then
FAILURE="\"$RESULT\" is not a valid version date"
AC_ARG_WITH(version-date, [AS_HELP_STRING([--with-version-date],
[Set version date @<:@current source value@:>@])])
if test "x$with_version_date" = xyes || test "x$with_version_date" = xno; then
AC_MSG_ERROR([--with-version-date must have a value])
elif test "x$with_version_date" != x; then
if [ ! [[ $with_version_date =~ ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ ]] ]; then
AC_MSG_ERROR(["$with_version_date" is not a valid version date])
else
VERSION_DATE="$with_version_date"
fi
else
VERSION_DATE="$DEFAULT_VERSION_DATE"
fi
])
# The vendor version string, if any
# DEFAULT is set to an empty string in the case of --with-vendor-version-string without
# any value, which would set VENDOR_VERSION_STRING_ENABLED to true and ultimately also
# cause VENDOR_VERSION_STRING to fall back to the value in DEFAULT
UTIL_ARG_WITH(NAME: vendor-version-string, TYPE: string,
DEFAULT: [],
OPTIONAL: true,
DESC: [Set vendor version string],
DEFAULT_DESC: [not specified])
if test "x$VENDOR_VERSION_STRING_ENABLED" = xtrue; then
if [ ! [[ $VENDOR_VERSION_STRING =~ ^[[:graph:]]*$ ]] ]; then
AC_MSG_ERROR([--with--vendor-version-string contains non-graphical characters: $VENDOR_VERSION_STRING])
fi
AC_ARG_WITH(vendor-version-string, [AS_HELP_STRING([--with-vendor-version-string],
[Set vendor version string @<:@not specified@:>@])])
if test "x$with_vendor_version_string" = xyes; then
AC_MSG_ERROR([--with-vendor-version-string must have a value])
elif [ ! [[ $with_vendor_version_string =~ ^[[:graph:]]*$ ]] ]; then
AC_MSG_ERROR([--with--vendor-version-string contains non-graphical characters: $with_vendor_version_string])
elif test "x$with_vendor_version_string" != xno; then
# Set vendor version string if --without is not passed
# Check not required if an empty value is passed, since VENDOR_VERSION_STRING
# would then be set to ""
VENDOR_VERSION_STRING="$with_vendor_version_string"
fi
# Set the MACOSX Bundle Name base
UTIL_ARG_WITH(NAME: macosx-bundle-name-base, TYPE: string,
DEFAULT: $MACOSX_BUNDLE_NAME_BASE,
DESC: [Set the MacOSX Bundle Name base. This is the base name for calculating MacOSX Bundle Names.],
DEFAULT_DESC: [from branding.conf],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
AC_ARG_WITH(macosx-bundle-name-base, [AS_HELP_STRING([--with-macosx-bundle-name-base],
[Set the MacOSX Bundle Name base. This is the base name for calculating MacOSX Bundle Names.
@<:@not specified@:>@])])
if test "x$with_macosx_bundle_name_base" = xyes || test "x$with_macosx_bundle_name_base" = xno; then
AC_MSG_ERROR([--with-macosx-bundle-name-base must have a value])
elif [ ! [[ $with_macosx_bundle_name_base =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-name-base contains non-printing characters: $with_macosx_bundle_name_base])
elif test "x$with_macosx_bundle_name_base" != x; then
# Set MACOSX_BUNDLE_NAME_BASE to the configured value.
MACOSX_BUNDLE_NAME_BASE="$with_macosx_bundle_name_base"
fi
AC_SUBST(MACOSX_BUNDLE_NAME_BASE)
# Set the MACOSX Bundle ID base
AC_ARG_WITH(macosx-bundle-id-base, [AS_HELP_STRING([--with-macosx-bundle-id-base],
[Set the MacOSX Bundle ID base. This is the base ID for calculating MacOSX Bundle IDs.
@<:@not specified@:>@])])
if test "x$with_macosx_bundle_id_base" = xyes || test "x$with_macosx_bundle_id_base" = xno; then
AC_MSG_ERROR([--with-macosx-bundle-id-base must have a value])
elif [ ! [[ $with_macosx_bundle_id_base =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-id-base contains non-printing characters: $with_macosx_bundle_id_base])
elif test "x$with_macosx_bundle_id_base" != x; then
# Set MACOSX_BUNDLE_ID_BASE to the configured value.
MACOSX_BUNDLE_ID_BASE="$with_macosx_bundle_id_base"
else
# If using the default value, append the VERSION_PRE if there is one
# to make it possible to tell official builds apart from developer builds
if test "x$VERSION_PRE" != x; then
MACOSX_BUNDLE_ID_BASE="$MACOSX_BUNDLE_ID_BASE-$VERSION_PRE"
fi
# Set the MACOSX Bundle ID base
UTIL_ARG_WITH(NAME: macosx-bundle-id-base, TYPE: string,
DEFAULT: $MACOSX_BUNDLE_ID_BASE,
DESC: [Set the MacOSX Bundle ID base. This is the base ID for calculating MacOSX Bundle IDs.],
DEFAULT_DESC: [based on branding.conf and VERSION_PRE],
CHECK_VALUE: [UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE])
fi
AC_SUBST(MACOSX_BUNDLE_ID_BASE)
# Set the MACOSX CFBundleVersion field
AC_ARG_WITH(macosx-bundle-build-version, [AS_HELP_STRING([--with-macosx-bundle-build-version],
[Set the MacOSX Bundle CFBundleVersion field. This key is a machine-readable
string composed of one to three period-separated integers and should represent the
build version. Defaults to the build number.])])
if test "x$with_macosx_bundle_build_version" = xyes || test "x$with_macosx_bundle_build_version" = xno; then
AC_MSG_ERROR([--with-macosx-bundle-build-version must have a value])
elif [ ! [[ $with_macosx_bundle_build_version =~ ^[0-9\.]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-build-version contains non numbers and periods: $with_macosx_bundle_build_version])
elif test "x$with_macosx_bundle_build_version" != x; then
MACOSX_BUNDLE_BUILD_VERSION="$with_macosx_bundle_build_version"
else
if test "x$VERSION_BUILD" != x; then
MACOSX_BUNDLE_BUILD_VERSION="$VERSION_BUILD"
else
@@ -533,21 +564,7 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
if [ [[ $VERSION_OPT =~ ^[0-9\.]+$ ]] ]; then
MACOSX_BUNDLE_BUILD_VERSION="$MACOSX_BUNDLE_BUILD_VERSION.$VERSION_OPT"
fi
# Set the MACOSX CFBundleVersion field
UTIL_ARG_WITH(NAME: macosx-bundle-build-version, TYPE: string,
DEFAULT: $MACOSX_BUNDLE_BUILD_VERSION,
DESC: [Set the MacOSX Bundle CFBundleVersion field. This key is a machine-readable
string composed of one to three period-separated integers and should represent the
build version.],
DEFAULT_DESC: [the build number],
CHECK_VALUE: [
if test "x$RESULT" = x; then
FAILURE="--with-macosx-bundle-build-version must have a value"
elif [ ! [[ $RESULT =~ ^[0-9\.]*$ ]] ]; then
FAILURE="--with-macosx-bundle-build-version contains non numbers and periods: $RESULT"
fi
])
AC_SUBST(MACOSX_BUNDLE_BUILD_VERSION)
# We could define --with flags for these, if really needed

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -247,14 +247,8 @@ AC_DEFUN_ONCE([JVM_FEATURES_CHECK_CDS],
AC_DEFUN_ONCE([JVM_FEATURES_CHECK_DTRACE],
[
JVM_FEATURES_CHECK_AVAILABILITY(dtrace, [
AC_MSG_CHECKING([for dtrace tool and platform support])
if test "x$OPENJDK_TARGET_CPU_ARCH" = "xppc"; then
AC_MSG_RESULT([no, $OPENJDK_TARGET_CPU_ARCH])
AVAILABLE=false
elif test "x$OPENJDK_TARGET_CPU_ARCH" = "xs390"; then
AC_MSG_RESULT([no, $OPENJDK_TARGET_CPU_ARCH])
AVAILABLE=false
elif test "x$DTRACE" != "x" && test -x "$DTRACE"; then
AC_MSG_CHECKING([for dtrace tool])
if test "x$DTRACE" != "x" && test -x "$DTRACE"; then
AC_MSG_RESULT([$DTRACE])
else
AC_MSG_RESULT([no])

View File

@@ -70,25 +70,6 @@ AC_DEFUN_ONCE([LIB_SETUP_ALSA],
PKG_CHECK_MODULES(ALSA, alsa, [ALSA_FOUND=yes], [ALSA_FOUND=no])
fi
fi
if test "x$ALSA_FOUND" = xno; then
# If we have sysroot set, and no explicit library location is set,
# look at known locations in sysroot.
if test "x$SYSROOT" != "x" && test "x${with_alsa_lib}" == x; then
if test -f "$SYSROOT/usr/lib64/libasound.so" && test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
ALSA_LIBS="-L$SYSROOT/usr/lib64 -lasound"
ALSA_FOUND=yes
elif test -f "$SYSROOT/usr/lib/libasound.so"; then
ALSA_LIBS="-L$SYSROOT/usr/lib -lasound"
ALSA_FOUND=yes
elif test -f "$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libasound.so"; then
ALSA_LIBS="-L$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI -lasound"
ALSA_FOUND=yes
elif test -f "$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libasound.so"; then
ALSA_LIBS="-L$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI -lasound"
ALSA_FOUND=yes
fi
fi
fi
if test "x$ALSA_FOUND" = xno; then
AC_CHECK_HEADERS([alsa/asoundlib.h],
[

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2016, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -68,21 +68,13 @@ AC_DEFUN_ONCE([LIB_SETUP_CUPS],
fi
fi
if test "x$CUPS_FOUND" = xno; then
# Are the cups headers installed in the default AIX or /usr/include location?
if test "x$OPENJDK_TARGET_OS" = "xaix"; then
AC_CHECK_HEADERS([/opt/freeware/include/cups/cups.h /opt/freeware/include/cups/ppd.h], [
CUPS_FOUND=yes
CUPS_CFLAGS="-I/opt/freeware/include"
DEFAULT_CUPS=yes
])
else
# Are the cups headers installed in the default /usr/include location?
AC_CHECK_HEADERS([cups/cups.h cups/ppd.h], [
CUPS_FOUND=yes
CUPS_CFLAGS=
DEFAULT_CUPS=yes
])
fi
fi
if test "x$CUPS_FOUND" = xno; then
HELP_MSG_MISSING_DEPENDENCY([cups])
AC_MSG_ERROR([Could not find cups! $HELP_MSG ])

View File

@@ -134,8 +134,6 @@ AC_DEFUN([LIB_BUILD_BINUTILS],
BINUTILS_SRC="$with_binutils_src"
UTIL_FIXUP_PATH(BINUTILS_SRC)
BINUTILS_DIR="$CONFIGURESUPPORT_OUTPUTDIR/binutils"
if ! test -d $BINUTILS_SRC; then
AC_MSG_ERROR([--with-binutils-src is not pointing to a directory])
fi
@@ -143,14 +141,10 @@ AC_DEFUN([LIB_BUILD_BINUTILS],
AC_MSG_ERROR([--with-binutils-src does not look like a binutils source directory])
fi
if ! test -d $BINUTILS_DIR; then
$MKDIR -p $BINUTILS_DIR
fi
if test -e $BINUTILS_DIR/bfd/libbfd.a && \
test -e $BINUTILS_DIR/opcodes/libopcodes.a && \
test -e $BINUTILS_DIR/libiberty/libiberty.a && \
test -e $BINUTILS_DIR/zlib/libz.a; then
if test -e $BINUTILS_SRC/bfd/libbfd.a && \
test -e $BINUTILS_SRC/opcodes/libopcodes.a && \
test -e $BINUTILS_SRC/libiberty/libiberty.a && \
test -e $BINUTILS_SRC/zlib/libz.a; then
AC_MSG_NOTICE([Found binutils binaries in binutils source directory -- not building])
else
# On Windows, we cannot build with the normal Microsoft CL, but must instead use
@@ -181,20 +175,16 @@ AC_DEFUN([LIB_BUILD_BINUTILS],
fi
else
binutils_cc="$CC $SYSROOT_CFLAGS"
if test "x$COMPILE_TYPE" = xcross; then
binutils_target="--host=$OPENJDK_TARGET_AUTOCONF_NAME"
else
binutils_target=""
fi
fi
binutils_cflags="$binutils_cflags $MACHINE_FLAG $JVM_PICFLAG $C_O_FLAG_NORM"
AC_MSG_NOTICE([Running binutils configure])
AC_MSG_NOTICE([configure command line: cd $BINUTILS_DIR && $BINUTILS_SRC/configure --disable-nls CFLAGS="$binutils_cflags" CC="$binutils_cc" AR="$AR" $binutils_target])
AC_MSG_NOTICE([configure command line: ./configure --disable-nls CFLAGS="$binutils_cflags" CC="$binutils_cc" $binutils_target])
saved_dir=`pwd`
cd "$BINUTILS_DIR"
$BINUTILS_SRC/configure --disable-nls CFLAGS="$binutils_cflags" CC="$binutils_cc" AR="$AR" $binutils_target
if test $? -ne 0 || ! test -e $BINUTILS_DIR/Makefile; then
cd "$BINUTILS_SRC"
./configure --disable-nls CFLAGS="$binutils_cflags" CC="$binutils_cc" $binutils_target
if test $? -ne 0 || ! test -e $BINUTILS_SRC/Makefile; then
AC_MSG_NOTICE([Automatic building of binutils failed on configure. Try building it manually])
AC_MSG_ERROR([Cannot continue])
fi
@@ -207,6 +197,8 @@ AC_DEFUN([LIB_BUILD_BINUTILS],
cd $saved_dir
AC_MSG_NOTICE([Building of binutils done])
fi
BINUTILS_DIR="$BINUTILS_SRC"
])
################################################################################
@@ -242,14 +234,8 @@ AC_DEFUN([LIB_SETUP_HSDIS_BINUTILS],
elif test "x$BINUTILS_DIR" != x; then
if test -e $BINUTILS_DIR/bfd/libbfd.a && \
test -e $BINUTILS_DIR/opcodes/libopcodes.a && \
test -e $BINUTILS_DIR/libiberty/libiberty.a && \
test -e $BINUTILS_DIR/zlib/libz.a; then
HSDIS_CFLAGS="-DLIBARCH_$OPENJDK_TARGET_CPU_LEGACY_LIB"
if test -n "$BINUTILS_SRC"; then
HSDIS_CFLAGS="$HSDIS_CFLAGS -I$BINUTILS_SRC/include -I$BINUTILS_DIR/bfd"
else
HSDIS_CFLAGS="$HSDIS_CFLAGS -I$BINUTILS_DIR/include -I$BINUTILS_DIR/bfd"
fi
test -e $BINUTILS_DIR/libiberty/libiberty.a; then
HSDIS_CFLAGS="-I$BINUTILS_DIR/include -I$BINUTILS_DIR/bfd -DLIBARCH_$OPENJDK_TARGET_CPU_LEGACY_LIB"
HSDIS_LDFLAGS=""
HSDIS_LIBS="$BINUTILS_DIR/bfd/libbfd.a $BINUTILS_DIR/opcodes/libopcodes.a $BINUTILS_DIR/libiberty/libiberty.a $BINUTILS_DIR/zlib/libz.a"
fi
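A usage sketch of the build-from-source path handled above (--with-binutils-src is taken from the checks; --with-hsdis=binutils and the binutils version are assumptions):

    # Let configure build binutils and wire the resulting static archives
    # (libbfd, libopcodes, libiberty, libz) into the hsdis flags above:
    bash configure --with-hsdis=binutils \
        --with-binutils-src=$HOME/src/binutils-2.40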

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2018, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2018, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -27,9 +27,8 @@
# Setup libraries and functionalities needed to test the JDK.
################################################################################
# Minimum supported versions
JTREG_MINIMUM_VERSION=7.3.1
GTEST_MINIMUM_VERSION=1.14.0
# Minimum supported version
JTREG_MINIMUM_VERSION=7
###############################################################################
#
@@ -55,18 +54,9 @@ AC_DEFUN_ONCE([LIB_TESTS_SETUP_GTEST],
AC_MSG_RESULT([no])
AC_MSG_ERROR([Can't find 'googlemock/include/gmock/gmock.h' under ${with_gtest} given with the --with-gtest option.])
else
GTEST_FRAMEWORK_SRC=$with_gtest
GTEST_FRAMEWORK_SRC=${with_gtest}
AC_MSG_RESULT([$GTEST_FRAMEWORK_SRC])
UTIL_FIXUP_PATH([GTEST_FRAMEWORK_SRC])
# Verify that the version is the required one.
# This is a simplified version of TOOLCHAIN_CHECK_COMPILER_VERSION
gtest_version="`$GREP GOOGLETEST_VERSION $GTEST_FRAMEWORK_SRC/CMakeLists.txt | $SED -e 's/set(GOOGLETEST_VERSION \(.*\))/\1/'`"
comparable_actual_version=`$AWK -F. '{ printf("%05d%05d%05d%05d\n", [$]1, [$]2, [$]3, [$]4) }' <<< "$gtest_version"`
comparable_minimum_version=`$AWK -F. '{ printf("%05d%05d%05d%05d\n", [$]1, [$]2, [$]3, [$]4) }' <<< "$GTEST_MINIMUM_VERSION"`
if test $comparable_actual_version -lt $comparable_minimum_version ; then
AC_MSG_ERROR([gtest version is too old, at least version $GTEST_MINIMUM_VERSION is required])
fi
fi
fi
fi
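The zero-padded comparison used in the gtest check can be reproduced by hand; a bash sketch of the same awk trick:

    # Turn "a.b.c.d" into a fixed-width key so a plain integer -lt works:
    version_key() { awk -F. '{ printf("%05d%05d%05d%05d\n", $1, $2, $3, $4) }' <<< "$1"; }
    # "1.13.0" -> 00001000130000000000, below the 1.14.0 minimum:
    if test "$(version_key 1.13.0)" -lt "$(version_key 1.14.0)"; then
      echo "gtest 1.13.0 would be rejected as too old"
    fi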
@@ -227,47 +217,12 @@ AC_DEFUN_ONCE([LIB_TESTS_SETUP_JTREG],
UTIL_FIXUP_PATH(JT_HOME)
AC_SUBST(JT_HOME)
# Specify a JDK for running jtreg. Defaults to the BOOT_JDK.
AC_ARG_WITH(jtreg-jdk, [AS_HELP_STRING([--with-jtreg-jdk],
[path to JDK for running jtreg @<:@BOOT_JDK@:>@])])
AC_MSG_CHECKING([for jtreg jdk])
if test "x${with_jtreg_jdk}" != x; then
if test "x${with_jtreg_jdk}" = xno; then
AC_MSG_RESULT([no, jtreg jdk not specified])
elif test "x${with_jtreg_jdk}" = xyes; then
AC_MSG_RESULT([not specified])
AC_MSG_ERROR([--with-jtreg-jdk needs a value])
else
JTREG_JDK="${with_jtreg_jdk}"
AC_MSG_RESULT([$JTREG_JDK])
UTIL_FIXUP_PATH(JTREG_JDK)
if test ! -f "$JTREG_JDK/bin/java"; then
AC_MSG_ERROR([Could not find jtreg java at $JTREG_JDK/bin/java])
fi
fi
else
JTREG_JDK="${BOOT_JDK}"
AC_MSG_RESULT([no, using BOOT_JDK])
fi
UTIL_FIXUP_PATH(JTREG_JDK)
AC_SUBST([JTREG_JDK])
# For use in the configure script
JTREG_JAVA="$FIXPATH $JTREG_JDK/bin/java"
# Verify jtreg version
if test "x$JT_HOME" != x; then
AC_MSG_CHECKING([jtreg jar existence])
if test ! -f "$JT_HOME/lib/jtreg.jar"; then
AC_MSG_ERROR([Could not find jtreg jar at $JT_HOME/lib/jtreg.jar])
fi
AC_MSG_CHECKING([jtreg version number])
# jtreg -version looks like this: "jtreg 6.1+1-19"
# Extract actual version part ("6.1" in this case)
jtreg_version_full=$($JTREG_JAVA -jar $JT_HOME/lib/jtreg.jar -version | $HEAD -n 1 | $CUT -d ' ' -f 2)
jtreg_version_full=`$JAVA -jar $JT_HOME/lib/jtreg.jar -version | $HEAD -n 1 | $CUT -d ' ' -f 2`
jtreg_version=${jtreg_version_full/%+*}
AC_MSG_RESULT([$jtreg_version])
@@ -314,16 +269,10 @@ AC_DEFUN_ONCE([LIB_TESTS_SETUP_JIB],
#
AC_DEFUN_ONCE([LIB_TESTS_ENABLE_DISABLE_FAILURE_HANDLER],
[
if test "x$BUILD_ENV" = "xci"; then
BUILD_FAILURE_HANDLER_DEFAULT=auto
else
BUILD_FAILURE_HANDLER_DEFAULT=false
fi
UTIL_ARG_ENABLE(NAME: jtreg-failure-handler, DEFAULT: $BUILD_FAILURE_HANDLER_DEFAULT,
UTIL_ARG_ENABLE(NAME: jtreg-failure-handler, DEFAULT: auto,
RESULT: BUILD_FAILURE_HANDLER,
DESC: [enable building of the jtreg failure handler],
DEFAULT_DESC: [enabled if jtreg is present and build env is CI],
DEFAULT_DESC: [enabled if jtreg is present],
CHECKING_MSG: [if the jtreg failure handler should be built],
CHECK_AVAILABLE: [
AC_MSG_CHECKING([if the jtreg failure handler is available])
@@ -336,22 +285,3 @@ AC_DEFUN_ONCE([LIB_TESTS_ENABLE_DISABLE_FAILURE_HANDLER],
])
AC_SUBST(BUILD_FAILURE_HANDLER)
])
AC_DEFUN_ONCE([LIB_TESTS_ENABLE_DISABLE_JTREG_TEST_THREAD_FACTORY],
[
UTIL_ARG_ENABLE(NAME: jtreg-test-thread-factory, DEFAULT: auto,
RESULT: BUILD_JTREG_TEST_THREAD_FACTORY,
DESC: [enable building of the jtreg test thread factory],
DEFAULT_DESC: [enabled if jtreg is present],
CHECKING_MSG: [if the jtreg test thread factory should be built],
CHECK_AVAILABLE: [
AC_MSG_CHECKING([if the jtreg test thread factory is available])
if test "x$JT_HOME" != "x"; then
AC_MSG_RESULT([yes])
else
AVAILABLE=false
AC_MSG_RESULT([no (jtreg not present)])
fi
])
AC_SUBST(BUILD_JTREG_TEST_THREAD_FACTORY)
])
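A configure usage sketch combining the jtreg options above (paths are illustrative; --with-jtreg, which sets JT_HOME, is assumed):

    bash configure \
        --with-jtreg=/opt/jtreg-7.3.1+1 \
        --with-jtreg-jdk=$HOME/jdks/jdk-21 \
        --enable-jtreg-failure-handler \
        --enable-jtreg-test-thread-factory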

View File

@@ -0,0 +1,162 @@
#
# Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2023, JetBrains s.r.o.. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
################################################################################
# Setup wayland
################################################################################
AC_DEFUN_ONCE([LIB_SETUP_WAYLAND],
[
AC_ARG_WITH(wayland, [AS_HELP_STRING([--with-wayland],
[specify prefix directory for the wayland package
(expecting the headers under PATH/include)])])
AC_ARG_WITH(wayland-include, [AS_HELP_STRING([--with-wayland-include],
[specify directory for the wayland include files])])
if test "x$NEEDS_LIB_WAYLAND" = xfalse; then
if (test "x${with_wayland}" != x && test "x${with_wayland}" != xno) || \
(test "x${with_wayland_include}" != x && test "x${with_wayland_include}" != xno); then
AC_MSG_WARN([[wayland not used, so --with-wayland[-*] is ignored]])
fi
WAYLAND_CFLAGS=
WAYLAND_LIBS=
VULKAN_FLAGS=
VULKAN_ENABLED=false
else
WAYLAND_FOUND=no
if test "x${with_wayland}" = xno || test "x${with_wayland_include}" = xno; then
AC_MSG_ERROR([It is not possible to disable the use of wayland. Remove the --without-wayland option.])
fi
if test "x${with_wayland}" != x; then
AC_MSG_CHECKING([for wayland headers])
if test -s "${with_wayland}/include/wayland-client.h" && test -s "${with_wayland}/include/wayland-cursor.h"; then
WAYLAND_CFLAGS="-I${with_wayland}/include"
WAYLAND_LIBS="-L${with_wayland}/lib -lwayland-client -lwayland-cursor"
WAYLAND_FOUND=yes
AC_MSG_RESULT([$WAYLAND_FOUND])
else
AC_MSG_ERROR([Can't find 'include/wayland-client.h' and 'include/wayland-cursor.h' under ${with_wayland} given with the --with-wayland option.])
fi
fi
if test "x${with_wayland_include}" != x; then
AC_MSG_CHECKING([for wayland headers])
if test -s "${with_wayland_include}/wayland-client.h" && test -s "${with_wayland_include}/wayland-cursor.h"; then
WAYLAND_CFLAGS="-I${with_wayland_include}"
WAYLAND_FOUND=yes
AC_MSG_RESULT([$WAYLAND_FOUND])
else
AC_MSG_ERROR([Can't find 'wayland-client.h' and 'wayland-cursor.h' under ${with_wayland_include} given with the --with-wayland-include option.])
fi
fi
if test "x$WAYLAND_FOUND" = xno; then
# Are the wayland headers installed in the default /usr/include location?
AC_CHECK_HEADERS([wayland-client.h wayland-cursor.h],
[ WAYLAND_FOUND=yes ],
[ WAYLAND_FOUND=no; break ]
)
if test "x$WAYLAND_FOUND" = xyes; then
WAYLAND_CFLAGS=
WAYLAND_LIBS="-lwayland-client -lwayland-cursor"
DEFAULT_WAYLAND=yes
fi
fi
if test "x$WAYLAND_FOUND" = xno; then
HELP_MSG_MISSING_DEPENDENCY([wayland])
AC_MSG_ERROR([Could not find wayland! $HELP_MSG ])
fi
# Checking for vulkan sdk
AC_ARG_WITH(vulkan, [AS_HELP_STRING([--with-vulkan],
[specify whether we use vulkan])])
AC_ARG_WITH(vulkan-include, [AS_HELP_STRING([--with-vulkan-include],
[specify directory for the vulkan include files])])
if test "x$SUPPORTS_LIB_VULKAN" = xfalse; then
if (test "x${with_vulkan}" != x && test "x${with_vulkan}" != xno) || \
(test "x${with_vulkan_include}" != x && test "x${with_vulkan_include}" != xno); then
AC_MSG_WARN([[vulkan not used, so --with-vulkan-include is ignored]])
fi
VULKAN_FLAGS=
VULKAN_ENABLED=false
else
# Do not build vulkan rendering pipeline by default
if (test "x${with_vulkan}" = x && test "x${with_vulkan_include}" = x) || \
test "x${with_vulkan}" = xno || test "x${with_vulkan_include}" = xno ; then
VULKAN_FLAGS=
VULKAN_ENABLED=false
else
VULKAN_FOUND=no
if test "x${with_vulkan_include}" != x; then
AC_CHECK_HEADERS([${with_vulkan_include}/include/vulkan/vulkan.h],
[ VULKAN_FOUND=yes
VULKAN_FLAGS="-DVK_USE_PLATFORM_WAYLAND_KHR -I${with_vulkan_include}/include -DVULKAN_ENABLED"
VULKAN_ENABLED=true
],
[ AC_MSG_ERROR([Can't find 'vulkan/vulkan.h' under '${with_vulkan_include}']) ]
)
fi
if test "x$VULKAN_FOUND" = xno; then
# Check vulkan sdk location
AC_CHECK_HEADERS([$VULKAN_SDK/include/vulkan/vulkan.h],
[ VULKAN_FOUND=yes
VULKAN_FLAGS="-DVK_USE_PLATFORM_WAYLAND_KHR -I${VULKAN_SDK}/include -DVULKAN_ENABLED"
VULKAN_ENABLED=true
],
[ VULKAN_FOUND=no; break ]
)
fi
if test "x$VULKAN_FOUND" = xno; then
# Check default /usr/include location
AC_CHECK_HEADERS([vulkan/vulkan.h],
[ VULKAN_FOUND=yes
VULKAN_FLAGS="-DVK_USE_PLATFORM_WAYLAND_KHR -DVULKAN_ENABLED"
VULKAN_ENABLED=true
],
[ VULKAN_FOUND=no; break ]
)
fi
if test "x$VULKAN_FOUND" = xno; then
HELP_MSG_MISSING_DEPENDENCY([vulkan])
AC_MSG_ERROR([Could not find vulkan! $HELP_MSG ])
fi
fi
fi
fi
AC_SUBST(VULKAN_FLAGS)
AC_SUBST(VULKAN_ENABLED)
AC_SUBST(WAYLAND_CFLAGS)
AC_SUBST(WAYLAND_LIBS)
])
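A usage sketch for the new macro (paths and SDK version are illustrative; VULKAN_SDK is the environment variable probed above):

    # Wayland headers from a custom prefix plus a Vulkan SDK:
    export VULKAN_SDK=$HOME/vulkansdk/1.3.250.1/x86_64
    bash configure --with-wayland=/opt/wayland --with-vulkan
    # Or point directly at a directory containing include/vulkan/vulkan.h:
    bash configure --with-vulkan-include=$HOME/vulkansdk/1.3.250.1/x86_64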

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -35,7 +35,6 @@ AC_DEFUN_ONCE([LIB_SETUP_X11],
X_CFLAGS=
X_LIBS=
else
x_libraries_orig="$x_libraries"
if test "x${with_x}" = xno; then
AC_MSG_ERROR([It is not possible to disable the use of X11. Remove the --without-x option.])
@@ -49,7 +48,6 @@ AC_DEFUN_ONCE([LIB_SETUP_X11],
fi
if test "x$x_libraries" = xNONE; then
x_libraries="${with_x}/lib"
x_libraries_orig="$x_libraries"
fi
else
# Check if the user has specified sysroot, but not --with-x, --x-includes or --x-libraries.
@@ -71,9 +69,9 @@ AC_DEFUN_ONCE([LIB_SETUP_X11],
elif test -f "$SYSROOT/usr/lib/libX11.so"; then
x_libraries="$SYSROOT/usr/lib"
elif test -f "$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"; then
x_libraries="$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI"
x_libraries="$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"
elif test -f "$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"; then
x_libraries="$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI"
x_libraries="$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"
fi
fi
fi
@@ -84,8 +82,8 @@ AC_DEFUN_ONCE([LIB_SETUP_X11],
AC_PATH_XTRA
# AC_PATH_XTRA creates X_LIBS and sometimes adds -R flags. When cross compiling
# this doesn't make sense so we remove it; same for sysroot (devkit).
if test "x$COMPILE_TYPE" = xcross || (test "x$SYSROOT" != "x" && test "x$x_libraries_orig" = xNONE); then
# this doesn't make sense so we remove it.
if test "x$COMPILE_TYPE" = xcross; then
X_LIBS=`$ECHO $X_LIBS | $SED 's/-R \{0,1\}[[^ ]]*//g'`
fi
@@ -98,20 +96,16 @@ AC_DEFUN_ONCE([LIB_SETUP_X11],
OLD_CFLAGS="$CFLAGS"
CFLAGS="$CFLAGS $SYSROOT_CFLAGS $X_CFLAGS"
HEADERS_TO_CHECK="X11/extensions/shape.h X11/extensions/Xrender.h X11/extensions/XTest.h X11/Intrinsic.h"
# There is no Xrandr extension on AIX
if test "x$OPENJDK_TARGET_OS" = xaix; then
# There is no Xrandr extension on AIX. Code is duplicated to avoid autoconf
# 2.71+ warning "AC_CHECK_HEADERS: you should use literals"
X_CFLAGS="$X_CFLAGS -DNO_XRANDR"
AC_CHECK_HEADERS([X11/extensions/shape.h X11/extensions/Xrender.h X11/extensions/XTest.h X11/Intrinsic.h],
[X11_HEADERS_OK=yes],
[X11_HEADERS_OK=no; break],
[
# include <X11/Xlib.h>
# include <X11/Xutil.h>
]
)
else
AC_CHECK_HEADERS([X11/extensions/shape.h X11/extensions/Xrender.h X11/extensions/XTest.h X11/Intrinsic.h X11/extensions/Xrandr.h],
HEADERS_TO_CHECK="$HEADERS_TO_CHECK X11/extensions/Xrandr.h"
fi
# Need to include Xlib.h and Xutil.h to avoid "present but cannot be compiled" warnings on Solaris 10
AC_CHECK_HEADERS([$HEADERS_TO_CHECK],
[X11_HEADERS_OK=yes],
[X11_HEADERS_OK=no; break],
[
@@ -119,7 +113,6 @@ AC_DEFUN_ONCE([LIB_SETUP_X11],
# include <X11/Xutil.h>
]
)
fi
if test "x$X11_HEADERS_OK" = xno; then
HELP_MSG_MISSING_DEPENDENCY([x11])
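A usage sketch for non-default X11 locations (--x-includes/--x-libraries come from autoconf's AC_PATH_XTRA used above; paths are illustrative):

    bash configure --x-includes=/opt/X11/include --x-libraries=/opt/X11/lib
    # Or give one prefix and let x_includes/x_libraries default from it:
    bash configure --with-x=/opt/X11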

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -33,6 +33,7 @@ m4_include([lib-freetype.m4])
m4_include([lib-hsdis.m4])
m4_include([lib-std.m4])
m4_include([lib-x11.m4])
m4_include([lib-wayland.m4])
m4_include([lib-tests.m4])
@@ -41,14 +42,23 @@ m4_include([lib-tests.m4])
################################################################################
AC_DEFUN_ONCE([LIB_DETERMINE_DEPENDENCIES],
[
# Check if X11 is needed
# Check if X11, wayland and vulkan is needed
if test "x$OPENJDK_TARGET_OS" = xwindows || test "x$OPENJDK_TARGET_OS" = xmacosx; then
# No X11 support on windows or macosx
# No X11 and wayland support on windows or macosx
NEEDS_LIB_X11=false
NEEDS_LIB_WAYLAND=false
SUPPORTS_LIB_VULKAN=false
elif test "x$ENABLE_HEADLESS_ONLY" = xtrue; then
# No X11 support needed when building headless only
NEEDS_LIB_X11=false
NEEDS_LIB_WAYLAND=false
SUPPORTS_LIB_VULKAN=false
else
# All other instances need X11, even if building headless only, libawt still
# All other instances need X11 and wayland, even if building headless only, libawt still
# needs X11 headers.
NEEDS_LIB_X11=true
NEEDS_LIB_WAYLAND=true
SUPPORTS_LIB_VULKAN=true
fi
# Check if fontconfig is needed
@@ -82,34 +92,13 @@ AC_DEFUN_ONCE([LIB_DETERMINE_DEPENDENCIES],
fi
# Check if ffi is needed
if HOTSPOT_CHECK_JVM_VARIANT(zero) || test "x$ENABLE_FALLBACK_LINKER" = "xtrue"; then
if HOTSPOT_CHECK_JVM_VARIANT(zero); then
NEEDS_LIB_FFI=true
else
NEEDS_LIB_FFI=false
fi
])
################################################################################
# Setup BASIC_JVM_LIBS that can be different depending on build/target platform
################################################################################
AC_DEFUN([LIB_SETUP_JVM_LIBS],
[
# Atomic library
# 32-bit platforms needs fallback library for 8-byte atomic ops on Zero
if HOTSPOT_CHECK_JVM_VARIANT(zero); then
if test "x$OPENJDK_$1_OS" = xlinux &&
(test "x$OPENJDK_$1_CPU" = xarm ||
test "x$OPENJDK_$1_CPU" = xm68k ||
test "x$OPENJDK_$1_CPU" = xmips ||
test "x$OPENJDK_$1_CPU" = xmipsel ||
test "x$OPENJDK_$1_CPU" = xppc ||
test "x$OPENJDK_$1_CPU" = xsh ||
test "x$OPENJDK_$1_CPU" = xriscv32); then
BASIC_JVM_LIBS_$1="$BASIC_JVM_LIBS_$1 -latomic"
fi
fi
])
################################################################################
# Parse library options, and setup needed libraries
################################################################################
@@ -126,11 +115,11 @@ AC_DEFUN_ONCE([LIB_SETUP_LIBRARIES],
LIB_SETUP_LIBFFI
LIB_SETUP_MISC_LIBS
LIB_SETUP_X11
LIB_SETUP_WAYLAND
LIB_TESTS_SETUP_GTEST
BASIC_JDKLIB_LIBS=""
BASIC_JDKLIB_LIBS_TARGET=""
if test "x$TOOLCHAIN_TYPE" != xmicrosoft; then
BASIC_JDKLIB_LIBS="-ljava -ljvm"
fi
@@ -157,6 +146,27 @@ AC_DEFUN_ONCE([LIB_SETUP_LIBRARIES],
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -lrt"
fi
# Atomic library
# 32-bit platforms needs fallback library for 8-byte atomic ops on Zero
if HOTSPOT_CHECK_JVM_VARIANT(zero); then
if test "x$OPENJDK_TARGET_OS" = xlinux &&
(test "x$OPENJDK_TARGET_CPU" = xarm ||
test "x$OPENJDK_TARGET_CPU" = xm68k ||
test "x$OPENJDK_TARGET_CPU" = xmips ||
test "x$OPENJDK_TARGET_CPU" = xmipsel ||
test "x$OPENJDK_TARGET_CPU" = xppc ||
test "x$OPENJDK_TARGET_CPU" = xsh ||
test "x$OPENJDK_TARGET_CPU" = xriscv32); then
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -latomic"
fi
fi
# Because RISC-V only has word-sized atomics, it requires libatomic where
# other common architectures do not. So link libatomic by default.
if test "x$OPENJDK_TARGET_OS" = xlinux && test "x$OPENJDK_TARGET_CPU" = xriscv64; then
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -latomic"
fi
# perfstat lib
if test "x$OPENJDK_TARGET_OS" = xaix; then
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -lperfstat"
@@ -164,17 +174,15 @@ AC_DEFUN_ONCE([LIB_SETUP_LIBRARIES],
if test "x$OPENJDK_TARGET_OS" = xwindows; then
BASIC_JVM_LIBS="$BASIC_JVM_LIBS kernel32.lib user32.lib gdi32.lib winspool.lib \
comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib powrprof.lib uuid.lib \
ws2_32.lib winmm.lib version.lib psapi.lib"
comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib \
wsock32.lib winmm.lib version.lib psapi.lib"
fi
LIB_SETUP_JVM_LIBS(BUILD)
LIB_SETUP_JVM_LIBS(TARGET)
JDKLIB_LIBS="$BASIC_JDKLIB_LIBS"
JDKEXE_LIBS=""
JVM_LIBS="$BASIC_JVM_LIBS $BASIC_JVM_LIBS_TARGET"
JVM_LIBS="$BASIC_JVM_LIBS"
OPENJDK_BUILD_JDKLIB_LIBS="$BASIC_JDKLIB_LIBS"
OPENJDK_BUILD_JVM_LIBS="$BASIC_JVM_LIBS $BASIC_JVM_LIBS_BUILD"
OPENJDK_BUILD_JVM_LIBS="$BASIC_JVM_LIBS"
AC_SUBST(JDKLIB_LIBS)
AC_SUBST(JDKEXE_LIBS)

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -567,6 +567,8 @@ AC_DEFUN([PLATFORM_SETUP_LEGACY_VARS_HELPER],
HOTSPOT_$1_CPU_DEFINE=PPC64
elif test "x$OPENJDK_$1_CPU" = xppc64le; then
HOTSPOT_$1_CPU_DEFINE=PPC64
elif test "x$OPENJDK_$1_CPU" = xriscv32; then
HOTSPOT_$1_CPU_DEFINE=RISCV32
elif test "x$OPENJDK_$1_CPU" = xriscv64; then
HOTSPOT_$1_CPU_DEFINE=RISCV64
@@ -575,14 +577,10 @@ AC_DEFUN([PLATFORM_SETUP_LEGACY_VARS_HELPER],
HOTSPOT_$1_CPU_DEFINE=SPARC
elif test "x$OPENJDK_$1_CPU" = xppc; then
HOTSPOT_$1_CPU_DEFINE=PPC32
elif test "x$OPENJDK_$1_CPU" = xriscv32; then
HOTSPOT_$1_CPU_DEFINE=RISCV32
elif test "x$OPENJDK_$1_CPU" = xs390; then
HOTSPOT_$1_CPU_DEFINE=S390
elif test "x$OPENJDK_$1_CPU" = xs390x; then
HOTSPOT_$1_CPU_DEFINE=S390
elif test "x$OPENJDK_$1_CPU" = xloongarch64; then
HOTSPOT_$1_CPU_DEFINE=LOONGARCH64
elif test "x$OPENJDK_$1_CPU" != x; then
HOTSPOT_$1_CPU_DEFINE=$(echo $OPENJDK_$1_CPU | tr a-z A-Z)
fi
@@ -642,7 +640,6 @@ AC_DEFUN([PLATFORM_SET_MODULE_TARGET_OS_VALUES],
])
#%%% Build and target systems %%%
# Make sure to only use tools set up in BASIC_SETUP_FUNDAMENTAL_TOOLS.
AC_DEFUN_ONCE([PLATFORM_SETUP_OPENJDK_BUILD_AND_TARGET],
[
# Figure out the build and target systems. # Note that in autoconf terminology, "build" is obvious, but "target"
@@ -659,21 +656,6 @@ AC_DEFUN_ONCE([PLATFORM_SETUP_OPENJDK_BUILD_AND_TARGET],
PLATFORM_SET_MODULE_TARGET_OS_VALUES
PLATFORM_SET_RELEASE_FILE_OS_VALUES
PLATFORM_SETUP_LEGACY_VARS
PLATFORM_CHECK_DEPRECATION
])
AC_DEFUN([PLATFORM_CHECK_DEPRECATION],
[
AC_ARG_ENABLE(deprecated-ports, [AS_HELP_STRING([--enable-deprecated-ports@<:@=yes/no@:>@],
[Suppress the error when configuring for a deprecated port @<:@no@:>@])])
if test "x$OPENJDK_TARGET_OS" = xwindows && test "x$OPENJDK_TARGET_CPU" = xx86; then
if test "x$enable_deprecated_ports" = "xyes"; then
AC_MSG_WARN([The Windows 32-bit x86 port is deprecated and may be removed in a future release.])
else
AC_MSG_ERROR(m4_normalize([The Windows 32-bit x86 port is deprecated and may be removed in a future release.
Use --enable-deprecated-ports=yes to suppress this error.]))
fi
fi
])
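A usage sketch of the deprecation gate above (--with-target-bits is assumed here as the way to select the 32-bit port):

    # Configuring the 32-bit Windows x86 port now fails unless it is
    # explicitly re-enabled:
    bash configure --with-target-bits=32 --enable-deprecated-ports=yes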
AC_DEFUN_ONCE([PLATFORM_SETUP_OPENJDK_BUILD_OS_VERSION],
@@ -741,7 +723,7 @@ AC_DEFUN_ONCE([PLATFORM_SETUP_OPENJDK_TARGET_ENDIANNESS],
[
###############################################################################
#
# Is the target little or big endian?
# Is the target little of big endian?
#
AC_C_BIGENDIAN([ENDIAN="big"],[ENDIAN="little"],[ENDIAN="unknown"],[ENDIAN="universal_endianness"])

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -35,11 +35,6 @@ CONFIGURE_COMMAND_LINE:=@CONFIGURE_COMMAND_LINE@
# The current directory when configure was run
CONFIGURE_START_DIR:=@CONFIGURE_START_DIR@
# How configure was originally called, if not called directly
REAL_CONFIGURE_COMMAND_EXEC_SHORT := @REAL_CONFIGURE_COMMAND_EXEC_SHORT@
REAL_CONFIGURE_COMMAND_EXEC_FULL := @REAL_CONFIGURE_COMMAND_EXEC_FULL@
REAL_CONFIGURE_COMMAND_LINE := @REAL_CONFIGURE_COMMAND_LINE@
# A self-referential reference to this file.
SPEC:=@SPEC@
@@ -191,7 +186,6 @@ PRODUCT_NAME:=@PRODUCT_NAME@
PRODUCT_SUFFIX:=@PRODUCT_SUFFIX@
JDK_RC_PLATFORM_NAME:=@JDK_RC_PLATFORM_NAME@
JDK_RC_NAME:=@JDK_RC_NAME@
JDK_RC_COMPANY_NAME:=@JDK_RC_COMPANY_NAME@
COMPANY_NAME:=@COMPANY_NAME@
HOTSPOT_VM_DISTRO:=@HOTSPOT_VM_DISTRO@
MACOSX_BUNDLE_NAME_BASE=@MACOSX_BUNDLE_NAME_BASE@
@@ -359,8 +353,6 @@ BUILDJDK_OUTPUTDIR=$(OUTPUTDIR)/buildjdk
BUILD_FAILURE_HANDLER := @BUILD_FAILURE_HANDLER@
BUILD_JTREG_TEST_THREAD_FACTORY := @BUILD_JTREG_TEST_THREAD_FACTORY@
ENABLE_GENERATE_CLASSLIST := @ENABLE_GENERATE_CLASSLIST@
EXCLUDE_TRANSLATIONS := @EXCLUDE_TRANSLATIONS@
@@ -410,9 +402,6 @@ TEST_JOBS?=@TEST_JOBS@
DEFAULT_MAKE_TARGET:=@DEFAULT_MAKE_TARGET@
DEFAULT_LOG:=@DEFAULT_LOG@
# Fallback linker
ENABLE_FALLBACK_LINKER:=@ENABLE_FALLBACK_LINKER@
FREETYPE_TO_USE:=@FREETYPE_TO_USE@
FREETYPE_LIBS:=@FREETYPE_LIBS@
FREETYPE_CFLAGS:=@FREETYPE_CFLAGS@
@@ -425,7 +414,6 @@ LIBFFI_CFLAGS:=@LIBFFI_CFLAGS@
ENABLE_LIBFFI_BUNDLING:=@ENABLE_LIBFFI_BUNDLING@
LIBFFI_LIB_FILE:=@LIBFFI_LIB_FILE@
FILE_MACRO_CFLAGS := @FILE_MACRO_CFLAGS@
REPRODUCIBLE_CFLAGS := @REPRODUCIBLE_CFLAGS@
BRANCH_PROTECTION_CFLAGS := @BRANCH_PROTECTION_CFLAGS@
STATIC_LIBS_CFLAGS := @STATIC_LIBS_CFLAGS@
@@ -453,20 +441,22 @@ JCOV_INPUT_JDK=@JCOV_INPUT_JDK@
JCOV_FILTERS=@JCOV_FILTERS@
# AddressSanitizer
ASAN_ENABLED:=@ASAN_ENABLED@
# LeakSanitizer
LSAN_ENABLED:=@LSAN_ENABLED@
# UndefinedBehaviorSanitizer
UBSAN_ENABLED:=@UBSAN_ENABLED@
UBSAN_CFLAGS:=@UBSAN_CFLAGS@
UBSAN_LDFLAGS:=@UBSAN_LDFLAGS@
export ASAN_ENABLED:=@ASAN_ENABLED@
export DEVKIT_LIB_DIR:=@DEVKIT_LIB_DIR@
ifeq ($(ASAN_ENABLED), yes)
export ASAN_OPTIONS=handle_segv=0 detect_leaks=0
ifneq ($(DEVKIT_LIB_DIR),)
export LD_LIBRARY_PATH:=$(LD_LIBRARY_PATH):$(DEVKIT_LIB_DIR)
endif
endif
# Necessary additional compiler flags to compile X11
X_CFLAGS:=@X_CFLAGS@
X_LIBS:=@X_LIBS@
WAYLAND_CFLAGS:=@WAYLAND_CFLAGS@
WAYLAND_LIBS:=@WAYLAND_LIBS@
VULKAN_FLAGS:=@VULKAN_FLAGS@
VULKAN_ENABLED:=@VULKAN_ENABLED@
# The lowest required version of macosx
MACOSX_VERSION_MIN=@MACOSX_VERSION_MIN@
# The highest allowed version of macosx
@@ -603,7 +593,6 @@ AR := @AR@
ARFLAGS:=@ARFLAGS@
NM:=@NM@
NMFLAGS:=@NMFLAGS@
STRIP:=@STRIP@
OBJDUMP:=@OBJDUMP@
CXXFILT:=@CXXFILT@
@@ -681,9 +670,6 @@ JAR = $(JAR_CMD)
JLINK = $(JLINK_CMD)
JMOD = $(JMOD_CMD)
JTREG_JDK := @JTREG_JDK@
JTREG_JAVA = @FIXPATH@ $(JTREG_JDK)/bin/java $(JAVA_FLAGS_BIG) $(JAVA_FLAGS)
BUILD_JAVA_FLAGS := @BOOTCYCLE_JVM_ARGS_BIG@
BUILD_JAVA=@FIXPATH@ $(BUILD_JDK)/bin/java $(BUILD_JAVA_FLAGS)
BUILD_JAVAC=@FIXPATH@ $(BUILD_JDK)/bin/javac
@@ -701,8 +687,6 @@ INTERIM_LANGTOOLS_ADD_EXPORTS := \
--add-exports java.base/sun.invoke.util=jdk.compiler.interim \
--add-exports java.base/jdk.internal.javac=java.compiler.interim \
--add-exports java.base/jdk.internal.javac=jdk.compiler.interim \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.compiler.interim \
--add-exports jdk.internal.opt/jdk.internal.opt=jdk.javadoc.interim \
#
INTERIM_LANGTOOLS_MODULES_COMMA := $(strip $(subst $(SPACE),$(COMMA),$(strip \
$(INTERIM_LANGTOOLS_MODULES))))
@@ -817,6 +801,57 @@ UCRT_DLL_DIR:=@UCRT_DLL_DIR@
ENABLE_PANDOC:=@ENABLE_PANDOC@
PANDOC_MARKDOWN_FLAG:=@PANDOC_MARKDOWN_FLAG@
####################################################
#
# INSTALLATION
#
# Common prefix for all installed files. Defaults to /usr/local,
# but /opt/myjdk is another common version.
INSTALL_PREFIX=@prefix@
# Directories containing architecture-dependent files should be relative to exec_prefix
INSTALL_EXECPREFIX=@exec_prefix@
# java,javac,javap etc are installed here.
INSTALL_BINDIR=@bindir@
# Read only architecture-independent data
INSTALL_DATADIR=@datadir@
# Root of above.
INSTALL_DATAROOTDIR=@datarootdir@
# Doc files, other than info and man.
INSTALL_DOCDIR=@docdir@
# Html documentation
INSTALL_HTMLDIR=@htmldir@
# Installing C header files, JNI headers for example.
INSTALL_INCLUDEDIR=@includedir@
# Installing library files....
INSTALL_LIBDIR=@libdir@
# Executables that other programs run.
INSTALL_LIBEXECDIR=@libexecdir@
# Locale-dependent but architecture-independent data, such as message catalogs.
INSTALL_LOCALEDIR=@localedir@
# Modifiable single-machine data
INSTALL_LOCALSTATEDIR=@localstatedir@
# Man pages
INSTALL_MANDIR=@mandir@
# Modifiable architecture-independent data.
INSTALL_SHAREDSTATEDIR=@sharedstatedir@
# Read-only single-machine data
INSTALL_SYSCONFDIR=@sysconfdir@
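For example, with only a prefix given, the directories above resolve through the usual autoconf defaults (values are illustrative):

    bash configure --prefix=/opt/myjdk
    # In the generated spec this typically yields:
    #   INSTALL_PREFIX=/opt/myjdk
    #   INSTALL_BINDIR -> /opt/myjdk/bin
    #   INSTALL_MANDIR -> /opt/myjdk/share/man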
####################################################
#
# Libraries

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -40,7 +40,7 @@ VALID_TOOLCHAINS_all="gcc clang xlc microsoft"
# These toolchains are valid on different platforms
VALID_TOOLCHAINS_linux="gcc clang"
VALID_TOOLCHAINS_macosx="clang"
VALID_TOOLCHAINS_aix="xlc clang"
VALID_TOOLCHAINS_aix="xlc"
VALID_TOOLCHAINS_windows="microsoft"
# Toolchain descriptions
@@ -53,7 +53,7 @@ TOOLCHAIN_DESCRIPTION_xlc="IBM XL C/C++"
TOOLCHAIN_MINIMUM_VERSION_clang="3.5"
TOOLCHAIN_MINIMUM_VERSION_gcc="6.0"
TOOLCHAIN_MINIMUM_VERSION_microsoft="19.28.0.0" # VS2019 16.8, aka MSVC 14.28
TOOLCHAIN_MINIMUM_VERSION_xlc="16.1.0.0011"
TOOLCHAIN_MINIMUM_VERSION_xlc=""
# Minimum supported linker versions, empty means unspecified
TOOLCHAIN_MINIMUM_LD_VERSION_gcc="2.18"
@@ -234,25 +234,6 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETERMINE_TOOLCHAIN_TYPE],
# First toolchain type in the list is the default
DEFAULT_TOOLCHAIN=${VALID_TOOLCHAINS%% *}
# On AIX the default toolchain depends on the installed (found) compiler
# xlclang++ -> xlc toolchain
# ibm-clang++_r -> clang toolchain
# The compiler is searched on the PATH and TOOLCHAIN_PATH
# xlclang++ has precedence over ibm-clang++_r if both are installed
if test "x$OPENJDK_TARGET_OS" = xaix; then
DEFAULT_TOOLCHAIN="clang"
if test "x$TOOLCHAIN_PATH" != x; then
if test -e ${TOOLCHAIN_PATH}/xlclang++; then
DEFAULT_TOOLCHAIN="xlc"
fi
else
UTIL_LOOKUP_PROGS(XLCLANG_TEST_PATH, xlclang++)
if test "x$XLCLANG_TEST_PATH" != x; then
DEFAULT_TOOLCHAIN="xlc"
fi
fi
fi
if test "x$with_toolchain_type" = xlist; then
# List all toolchains
AC_MSG_NOTICE([The following toolchains are valid on this platform:])
@@ -282,16 +263,7 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETERMINE_TOOLCHAIN_TYPE],
if test "x$TOOLCHAIN_PATH" != x; then
XLC_TEST_PATH=${TOOLCHAIN_PATH}/
fi
if test "x$TOOLCHAIN_TYPE" = xclang; then
TOOLCHAIN_DESCRIPTION_clang="IBM Open XL C/C++"
XLCLANG_VERSION_OUTPUT=`${XLC_TEST_PATH}ibm-clang++_r --version 2>&1 | $HEAD -n 1`
$ECHO "$XLCLANG_VERSION_OUTPUT" | $GREP "IBM Open XL C/C++ for AIX" > /dev/null
if test $? -eq 0; then
AC_MSG_NOTICE([ibm-clang++_r output: $XLCLANG_VERSION_OUTPUT])
else
AC_MSG_ERROR([ibm-clang++_r version output check failed, output: $XLCLANG_VERSION_OUTPUT])
fi
else
XLCLANG_VERSION_OUTPUT=`${XLC_TEST_PATH}xlclang++ -qversion 2>&1 | $HEAD -n 1`
$ECHO "$XLCLANG_VERSION_OUTPUT" | $GREP "IBM XL C/C++ for AIX" > /dev/null
if test $? -eq 0; then
@@ -300,22 +272,13 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETERMINE_TOOLCHAIN_TYPE],
AC_MSG_ERROR([xlclang++ version output check failed, output: $XLCLANG_VERSION_OUTPUT])
fi
fi
fi
if test "x$OPENJDK_TARGET_OS" = xaix; then
TOOLCHAIN_CC_BINARY_clang="ibm-clang_r"
else
TOOLCHAIN_CC_BINARY_clang="clang"
fi
TOOLCHAIN_CC_BINARY_gcc="gcc"
TOOLCHAIN_CC_BINARY_microsoft="cl"
TOOLCHAIN_CC_BINARY_xlc="xlclang"
if test "x$OPENJDK_TARGET_OS" = xaix; then
TOOLCHAIN_CXX_BINARY_clang="ibm-clang++_r"
else
TOOLCHAIN_CXX_BINARY_clang="clang++"
fi
TOOLCHAIN_CXX_BINARY_gcc="g++"
TOOLCHAIN_CXX_BINARY_microsoft="cl"
TOOLCHAIN_CXX_BINARY_xlc="xlclang++"
@@ -389,10 +352,6 @@ AC_DEFUN_ONCE([TOOLCHAIN_POST_DETECTION],
# This is necessary since AC_PROG_CC defaults CFLAGS to "-g -O2"
CFLAGS="$ORG_CFLAGS"
CXXFLAGS="$ORG_CXXFLAGS"
# filter out some unwanted additions autoconf may add to CXX; we saw this on macOS with autoconf 2.72
UTIL_GET_NON_MATCHING_VALUES(cxx_filtered, $CXX, -std=c++11 -std=gnu++11)
CXX="$cxx_filtered"
])
# Check if a compiler is of the toolchain type we expect, and save the version
@@ -425,7 +384,7 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_COMPILER_VERSION],
# Collapse compiler output into a single line
COMPILER_VERSION_STRING=`$ECHO $COMPILER_VERSION_OUTPUT`
COMPILER_VERSION_NUMBER=`$ECHO $COMPILER_VERSION_OUTPUT | \
$SED -e 's/^.*Version: \(@<:@1-9@:>@@<:@0-9.@:>@*\).*$/\1/'`
$SED -e 's/^.*, V\(@<:@1-9@:>@@<:@0-9.@:>@*\).*$/\1/'`
elif test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# There is no specific version flag, but all output starts with a version string.
# First line typically looks something like:
@@ -449,11 +408,6 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_COMPILER_VERSION],
# Copyright (C) 2013 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# or look like
# gcc (GCC) 10.2.1 20200825 (Alibaba 10.2.1-3.8 2.32)
# Copyright (C) 2020 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
COMPILER_VERSION_OUTPUT=`$COMPILER --version 2>&1`
# Check that this is likely to be GCC.
$ECHO "$COMPILER_VERSION_OUTPUT" | $GREP "Free Software Foundation" > /dev/null
@@ -467,8 +421,7 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_COMPILER_VERSION],
COMPILER_VERSION_STRING=`$ECHO $COMPILER_VERSION_OUTPUT | \
$SED -e 's/ *Copyright .*//'`
COMPILER_VERSION_NUMBER=`$ECHO $COMPILER_VERSION_OUTPUT | \
$AWK -F ')' '{print [$]2}' | \
$AWK '{print [$]1}'`
$SED -e 's/^.* \(@<:@1-9@:>@@<:@0-9@:>@*\.@<:@0-9.@:>@*\)@<:@^0-9.@:>@.*$/\1/'`
elif test "x$TOOLCHAIN_TYPE" = xclang; then
# clang --version output typically looks like
# Apple LLVM version 5.0 (clang-500.2.79) (based on LLVM 3.3svn)
@@ -679,7 +632,7 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_CORE],
if test "x$TOOLCHAIN_MINIMUM_VERSION" != x; then
TOOLCHAIN_CHECK_COMPILER_VERSION(VERSION: $TOOLCHAIN_MINIMUM_VERSION,
IF_OLDER_THAN: [
AC_MSG_WARN([You are using $TOOLCHAIN_TYPE $CC_VERSION_NUMBER which is older than $TOOLCHAIN_MINIMUM_VERSION. This is not a supported configuration.])
AC_MSG_WARN([You are using $TOOLCHAIN_TYPE older than $TOOLCHAIN_MINIMUM_VERSION. This is not a supported configuration.])
]
)
fi
@@ -814,11 +767,7 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_EXTRA],
case $TOOLCHAIN_TYPE in
gcc|clang)
if test "x$OPENJDK_TARGET_OS" = xaix; then
UTIL_REQUIRE_TOOLCHAIN_PROGS(CXXFILT, ibm-llvm-cxxfilt)
else
UTIL_REQUIRE_TOOLCHAIN_PROGS(CXXFILT, c++filt)
fi
;;
esac
])
@@ -1017,11 +966,7 @@ AC_DEFUN_ONCE([TOOLCHAIN_MISC_CHECKS],
# Setup hotspot legacy names for toolchains
HOTSPOT_TOOLCHAIN_TYPE=$TOOLCHAIN_TYPE
if test "x$TOOLCHAIN_TYPE" = xclang; then
if test "x$OPENJDK_TARGET_OS" = xaix; then
HOTSPOT_TOOLCHAIN_TYPE=xlc
else
HOTSPOT_TOOLCHAIN_TYPE=gcc
fi
elif test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
HOTSPOT_TOOLCHAIN_TYPE=visCPP
fi
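A usage sketch for the toolchain selection handled above:

    # List the toolchains configure considers valid on this platform:
    bash configure --with-toolchain-type=list
    # Or force a specific one, e.g. clang on Linux:
    bash configure --with-toolchain-type=clang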

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -52,7 +52,7 @@ m4_include([util_paths.m4])
AC_DEFUN([UTIL_DEFUN_NAMED],
[
AC_DEFUN($1, [
m4_foreach([arg], m4_split(m4_normalize($2)), [
m4_foreach(arg, m4_split(m4_normalize($2)), [
m4_if(m4_bregexp(arg, [^\*]), -1,
[
m4_set_add(legal_named_args, arg)
@@ -64,18 +64,13 @@ AC_DEFUN([UTIL_DEFUN_NAMED],
)
])
# Delicate quoting and unquoting sequence to ensure the actual value is passed along unchanged
# For details on how this works, see https://git.openjdk.org/jdk/pull/11458#discussion_r1038173051
# WARNING: Proceed at the risk of your own sanity, getting this to work has made me completely
# incapable of feeling love or any other positive emotion
# ~Julian
m4_foreach([arg], m4_dquote(m4_dquote_elt($3)), [
m4_if(m4_index(arg, [: ]), -1, [m4_define([arg], m4_dquote(m4_bpatsubst(m4_dquote(arg), [:], [: ])))])
m4_define(arg_name, m4_substr(arg, 0, m4_index(arg, [: ])))
m4_foreach([arg], [$3], [
m4_if(m4_bregexp(arg, [: ]), -1, m4_define([arg], m4_bpatsubst(arg, [:], [: ])))
m4_define(arg_name, m4_substr(arg, 0, m4_bregexp(arg, [: ])))
m4_set_contains(legal_named_args, arg_name, [],[AC_MSG_ERROR([Internal error: m4_if(arg_name, , arg, arg_name) is not a valid named argument to [$1]. Valid arguments are 'm4_set_contents(defined_args, [ ]) m4_set_contents(legal_named_args, [ ])'.])])
m4_set_remove(required_named_args, arg_name)
m4_set_remove(legal_named_args, arg_name)
m4_pushdef([ARG_][]arg_name, m4_bpatsubst(m4_bpatsubst(m4_dquote(m4_dquote(arg)), arg_name[: ]), [^\s*]))
m4_pushdef([ARG_][]arg_name, m4_bpatsubst(m4_substr(arg, m4_incr(m4_incr(m4_bregexp(arg, [: ])))), [^\s*], []))
m4_set_add(defined_args, arg_name)
m4_undefine([arg_name])
])
@@ -199,7 +194,7 @@ AC_DEFUN([UTIL_GET_NON_MATCHING_VALUES],
if test -z "$legal_values"; then
$1="$2"
else
result=`$GREP -Fvx -- "$legal_values" <<< "$values_to_check" | $GREP -v '^$'`
result=`$GREP -Fvx "$legal_values" <<< "$values_to_check" | $GREP -v '^$'`
$1=${result//$'\n'/ }
fi
])
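The grep -Fvx filter above can be tried in isolation; a bash sketch (the -- keeps values that begin with a dash from being read as grep options):

    legal_values="$(printf 'yes\nno')"
    values_to_check="$(printf 'yes\nmaybe\nno')"
    # Prints only the values NOT in the legal list, i.e. "maybe":
    grep -Fvx -- "$legal_values" <<< "$values_to_check" | grep -v '^$'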
@@ -226,7 +221,7 @@ AC_DEFUN([UTIL_GET_MATCHING_VALUES],
if test -z "$illegal_values"; then
$1=""
else
result=`$GREP -Fx -- "$illegal_values" <<< "$values_to_check" | $GREP -v '^$'`
result=`$GREP -Fx "$illegal_values" <<< "$values_to_check" | $GREP -v '^$'`
$1=${result//$'\n'/ }
fi
])
@@ -381,18 +376,18 @@ UTIL_DEFUN_NAMED([UTIL_ARG_ENABLE],
m4_define(ARG_GIVEN, m4_translit(ARG_NAME, [a-z-], [A-Z_])[_GIVEN])
# If DESC is not specified, set it to a generic description.
m4_define([ARG_DESC], m4_if(m4_quote(ARG_DESC), , [[Enable the ARG_NAME feature]], [m4_normalize(ARG_DESC)]))
m4_define([ARG_DESC], m4_if(ARG_DESC, , [Enable the ARG_NAME feature], m4_normalize(ARG_DESC)))
# If CHECKING_MSG is not specified, set it to a generic description.
m4_define([ARG_CHECKING_MSG], m4_if(m4_quote(ARG_CHECKING_MSG), , [[for --enable-ARG_NAME]], [m4_normalize(ARG_CHECKING_MSG)]))
m4_define([ARG_CHECKING_MSG], m4_if(ARG_CHECKING_MSG, , [for --enable-ARG_NAME], m4_normalize(ARG_CHECKING_MSG)))
# If the code blocks are not given, set them to the empty statements to avoid
# tripping up bash.
m4_if(ARG_CHECK_AVAILABLE, , [m4_define([ARG_CHECK_AVAILABLE], [:])])
m4_if(ARG_IF_GIVEN, , [m4_define([ARG_IF_GIVEN], [:])])
m4_if(ARG_IF_NOT_GIVEN, , [m4_define([ARG_IF_NOT_GIVEN], [:])])
m4_if(ARG_IF_ENABLED, , [m4_define([ARG_IF_ENABLED], [:])])
m4_if(ARG_IF_DISABLED, , [m4_define([ARG_IF_DISABLED], [:])])
m4_define([ARG_CHECK_AVAILABLE], m4_if(ARG_CHECK_AVAILABLE, , :, ARG_CHECK_AVAILABLE))
m4_define([ARG_IF_GIVEN], m4_if(ARG_IF_GIVEN, , :, ARG_IF_GIVEN))
m4_define([ARG_IF_NOT_GIVEN], m4_if(ARG_IF_NOT_GIVEN, , :, ARG_IF_NOT_GIVEN))
m4_define([ARG_IF_ENABLED], m4_if(ARG_IF_ENABLED, , :, ARG_IF_ENABLED))
m4_define([ARG_IF_DISABLED], m4_if(ARG_IF_DISABLED, , :, ARG_IF_DISABLED))
##########################
# Part 2: Set up autoconf shell code
@@ -655,21 +650,21 @@ UTIL_DEFUN_NAMED([UTIL_ARG_WITH],
m4_define(ARG_GIVEN, m4_translit(ARG_NAME, [a-z-], [A-Z_])[_GIVEN])
# If DESC is not specified, set it to a generic description.
m4_define([ARG_DESC], m4_if(m4_quote(ARG_DESC), , [[Give a value for the ARG_NAME feature]], [m4_normalize(ARG_DESC)]))
m4_define([ARG_DESC], m4_if(ARG_DESC, , [Give a value for the ARG_NAME feature], m4_normalize(ARG_DESC)))
# If CHECKING_MSG is not specified, set it to a generic description.
m4_define([ARG_CHECKING_MSG], m4_if(m4_quote(ARG_CHECKING_MSG), , [[for --with-ARG_NAME]], [m4_normalize(ARG_CHECKING_MSG)]))
m4_define([ARG_CHECKING_MSG], m4_if(ARG_CHECKING_MSG, , [for --with-ARG_NAME], m4_normalize(ARG_CHECKING_MSG)))
m4_define([ARG_HAS_AUTO_BLOCK], m4_if(ARG_IF_AUTO, , false, true))
# If the code blocks are not given, set them to the empty statements to avoid
# tripping up bash.
m4_if(ARG_CHECK_AVAILABLE, , [m4_define([ARG_CHECK_AVAILABLE], [:])])
m4_if(ARG_CHECK_VALUE, , [m4_define([ARG_CHECK_VALUE], [:])])
m4_if(ARG_CHECK_FOR_FILES, , [m4_define([ARG_CHECK_FOR_FILES], [:])])
m4_if(ARG_IF_AUTO, , [m4_define([ARG_IF_AUTO], [:])])
m4_if(ARG_IF_GIVEN, , [m4_define([ARG_IF_GIVEN], [:])])
m4_if(ARG_IF_NOT_GIVEN, , [m4_define([ARG_IF_NOT_GIVEN], [:])])
m4_define([ARG_CHECK_AVAILABLE], m4_if(ARG_CHECK_AVAILABLE, , :, ARG_CHECK_AVAILABLE))
m4_define([ARG_CHECK_VALUE], m4_if(ARG_CHECK_VALUE, , :, ARG_CHECK_VALUE))
m4_define([ARG_CHECK_FOR_FILES], m4_if(ARG_CHECK_FOR_FILES, , :, ARG_CHECK_FOR_FILES))
m4_define([ARG_IF_AUTO], m4_if(ARG_IF_AUTO, , :, ARG_IF_AUTO))
m4_define([ARG_IF_GIVEN], m4_if(ARG_IF_GIVEN, , :, ARG_IF_GIVEN))
m4_define([ARG_IF_NOT_GIVEN], m4_if(ARG_IF_NOT_GIVEN, , :, ARG_IF_NOT_GIVEN))
##########################
# Part 2: Set up autoconf shell code
@@ -818,12 +813,3 @@ AC_DEFUN([UTIL_CHECK_STRING_NON_EMPTY],
FAILURE="Value cannot be empty"
fi
])
AC_DEFUN([UTIL_CHECK_STRING_NON_EMPTY_PRINTABLE],
[
if test "x$RESULT" = x; then
FAILURE="Value cannot be empty"
elif [ ! [[ $RESULT =~ ^[[:print:]]*$ ]] ]; then
FAILURE="Value contains non-printing characters: $RESULT"
fi
])

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -118,24 +118,6 @@ AC_DEFUN([UTIL_FIXUP_PATH],
fi
])
##############################################################################
# Fixup path to be a Windows full long path
# Note: Only supported with cygwin/msys2 (cygpath tool)
AC_DEFUN([UTIL_FIXUP_WIN_LONG_PATH],
[
# Only process if variable expands to non-empty
path="[$]$1"
if test "x$path" != x; then
if test "x$OPENJDK_BUILD_OS" = "xwindows"; then
win_path=$($PATHTOOL -wl "$path")
if test "x$win_path" != "x$path"; then
$1="$win_path"
fi
fi
fi
])
###############################################################################
# Check if the given file is a unix-style or windows-style executable, that is,
# if it expects paths in unix-style or windows-style.
@@ -539,7 +521,7 @@ AC_DEFUN([UTIL_REMOVE_SYMBOLIC_LINKS],
sym_link_dir=`pwd -P`
# Resolve file symlinks
while test $COUNTER -lt 20; do
ISLINK=`$LS -l $sym_link_dir/$sym_link_file | $GREP -e '->' | $SED -e 's/.*-> \(.*\)/\1/'`
ISLINK=`$LS -l $sym_link_dir/$sym_link_file | $GREP '\->' | $SED -e 's/.*-> \(.*\)/\1/'`
if test "x$ISLINK" == x; then
# This is not a symbolic link! We are done!
break

View File

@@ -219,35 +219,31 @@ define SetupJavaCompilationBody
# Use java server if it is enabled, and the user does not want a specialized
# class path.
ifeq ($$(ENABLE_JAVAC_SERVER)+$$($1_CLASSPATH), true+)
$1_JAVAC := $$(INTERIM_LANGTOOLS_ARGS) -m jdk.compiler.interim/com.sun.tools.sjavac.Main
# Create a configuration file with the needed information for the javac
# server to function properly.
$1_JAVAC_SERVER_CONFIG := $$($1_BIN)$$($1_MODULE_SUBDIR)/_the.$$($1_SAFE_NAME)-javacserver.conf
# Arguments needed to launch the javacserver client, as well as for the
# client to launch the server.
$1_JAVAC_SERVER_ARGS := $$(INTERIM_LANGTOOLS_ARGS) \
-cp $(BUILDTOOLS_OUTPUTDIR)/langtools_javacserver_classes
$1_JAVAC_SERVER_CONFIG := $$($1_BIN)$$($1_MODULE_SUBDIR)/_the.$$($1_SAFE_NAME)-server.conf
# The portfile contains the tcp/ip port on which the server listens
# and the cookie necessary to talk to the server.
$1_JAVAC_PORT_FILE := $$(call FixPath, $$(JAVAC_SERVER_DIR)/server.port)
# The javacmd tells the client how to run java to launch the server.
$1_JAVAC_SERVER_JAVA_CMD := $$(call FixPath, $$(JAVA) $$($1_JAVA_FLAGS) \
$$($1_JAVAC_SERVER_ARGS))
# The servercmd specifies how to launch the server. This will be executed
# by the client, if needed.
$1_JAVAC_SERVER_CMD := $$(call FixPath, $$(JAVA) $$($1_JAVA_FLAGS) $$($1_JAVAC))
$1_CONFIG_VARDEPS := $$($1_JAVAC_PORT_FILE) $$($1_JAVAC_SERVER_JAVA_CMD)
$1_CONFIG_VARDEPS := $$($1_JAVAC_PORT_FILE) $$($1_JAVAC_SERVER_CMD)
$1_CONFIG_VARDEPS_FILE := $$(call DependOnVariable, $1_CONFIG_VARDEPS, \
$$($1_BIN)$$($1_MODULE_SUBDIR)/_the.$1.config_vardeps)
# Write these values to a config file
$$($1_JAVAC_SERVER_CONFIG): $$($1_CONFIG_VARDEPS_FILE)
$(ECHO) portfile=$$($1_JAVAC_PORT_FILE) > $$@
$(ECHO) javacmd=$$($1_JAVAC_SERVER_JAVA_CMD) >> $$@
$(ECHO) servercmd=$$($1_JAVAC_SERVER_CMD) >> $$@
# Always use small java to launch client
$1_JAVAC_CMD := $$(JAVA_SMALL) $$($1_JAVA_FLAGS) $$($1_JAVAC_SERVER_ARGS) \
javacserver.Main --conf=$$($1_JAVAC_SERVER_CONFIG)
$1_JAVAC_CMD := $$(JAVA_SMALL) $$($1_JAVA_FLAGS) $$($1_JAVAC) \
--server:conf=$$($1_JAVAC_SERVER_CONFIG)
else
# No javac server
$1_JAVAC := $$(INTERIM_LANGTOOLS_ARGS) -m jdk.compiler.interim/com.sun.tools.javac.Main

View File

@@ -98,7 +98,7 @@ GLOBAL_VERSION_INFO_RESOURCE := $(TOPDIR)/src/java.base/windows/native/common/ve
JDK_RCFLAGS=$(RCFLAGS) \
-D"JDK_VERSION_STRING=$(VERSION_STRING)" \
-D"JDK_COMPANY=$(JDK_RC_COMPANY_NAME)" \
-D"JDK_COMPANY=$(COMPANY_NAME)" \
-D"JDK_VER=$(VERSION_NUMBER_FOUR_POSITIONS)" \
-D"JDK_COPYRIGHT=Copyright \xA9 $(COPYRIGHT_YEAR)" \
-D"JDK_NAME=$(JDK_RC_NAME) $(VERSION_SHORT)" \

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -152,10 +152,6 @@ define SetupLogging
endif
endif
ifneq ($$(findstring $$(LOG_LEVEL), debug trace),)
SHELL := $$(SHELL) -x
endif
ifeq ($$(LOG_LEVEL), trace)
SHELL_NO_RECURSE := $$(SHELL)
# Shell redefinition trick inspired by http://www.cmcrossroads.com/ask-mr-make/6535-tracing-rule-execution-in-gnu-make
@@ -198,7 +194,7 @@ $(eval $(call SetupLogging))
################################################################################
MAX_PARAMS := 96
MAX_PARAMS := 36
PARAM_SEQUENCE := $(call sequence, 2, $(MAX_PARAMS))
# Template for creating a macro taking named parameters. To use it, assign the
@@ -223,7 +219,7 @@ define NamedParamsMacroTemplate
$(foreach i,$(PARAM_SEQUENCE), $(if $(strip $($i)),\
$(strip $1)_$(strip $(call EscapeHash, $(call DoubleDollar, $($i))))$(NEWLINE)))
# Debug print all named parameter names and values
$(if $(findstring $(LOG_LEVEL), trace), \
$(if $(findstring $(LOG_LEVEL),debug trace), \
$(info $0 $(strip $1) $(foreach i,$(PARAM_SEQUENCE), \
$(if $(strip $($i)),$(NEWLINE) $(strip [$i] $(if $(filter $(LOG_LEVEL), trace), \
$($i), $(wordlist 1, 20, $($(i))) $(if $(word 21, $($(i))), ...)))))))
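A usage sketch of the log levels these blocks react to (target names are illustrative):

    # debug/trace enable the shell -x echoing and the named-parameter dump above:
    make jdk LOG=debug
    make hotspot LOG=trace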
@@ -289,7 +285,6 @@ ifeq ($(call isTargetOs, macosx), true)
$(CP) -fRP '$(call DecodeSpace, $<)' '$(call DecodeSpace, $@)'; \
fi
if [ -n "`$(XATTR) -ls '$(call DecodeSpace, $@)'`" ]; then \
$(CHMOD) -h u+w '$(call DecodeSpace, $@)'; \
$(XATTR) -cs '$(call DecodeSpace, $@)'; \
fi
endef
@@ -311,36 +306,17 @@ endef
# There are two versions, either creating a relative or an absolute link. Be
# careful when using this on Windows since the symlink created is only valid in
# the unix emulation environment.
# In msys2 we use mklink /J because its ln would perform a deep copy of the target.
# This inhibits performance and can lead to issues with long paths. With mklink /J
# relative linking does not work, so we handle the link as absolute path.
ifeq ($(OPENJDK_BUILD_OS_ENV), windows.msys2)
define link-file-relative
$(call MakeTargetDir)
$(RM) '$(call DecodeSpace, $@)'
cmd //c "mklink /J $(call FixPath, $(call DecodeSpace, $@)) $(call FixPath, $(call DecodeSpace, $<))"
endef
else
define link-file-relative
define link-file-relative
$(call MakeTargetDir)
$(RM) '$(call DecodeSpace, $@)'
$(LN) -s '$(call DecodeSpace, $(call RelativePath, $<, $(@D)))' '$(call DecodeSpace, $@)'
endef
endif
endef
ifeq ($(OPENJDK_BUILD_OS_ENV), windows.msys2)
define link-file-absolute
$(call MakeTargetDir)
$(RM) '$(call DecodeSpace, $@)'
cmd //c "mklink /J $(call FixPath, $(call DecodeSpace, $@)) $(call FixPath, $(call DecodeSpace, $<))"
endef
else
define link-file-absolute
define link-file-absolute
$(call MakeTargetDir)
$(RM) '$(call DecodeSpace, $@)'
$(LN) -s '$(call DecodeSpace, $<)' '$(call DecodeSpace, $@)'
endef
endif
endef
################################################################################

View File

@@ -257,7 +257,7 @@ ifeq ($(HAS_FILE_FUNCTION), true)
else
# Use printf to get consistent behavior on all platforms.
WriteFile = \
$(shell $(PRINTF) "%s\n" $(strip $(call ShellQuote, $1)) > $2)
$(shell $(PRINTF) "%s" $(call ShellQuote, $1) > $2)
endif
# Param 1 - Text to write
@@ -268,5 +268,5 @@ ifeq ($(HAS_FILE_FUNCTION), true)
else
# Use printf to get consistent behavior on all platforms.
AppendFile = \
$(shell $(PRINTF) "%s\n" $(strip $(call ShellQuote, $1)) >> $2)
$(shell $(PRINTF) "%s" $(call ShellQuote, $1) >> $2)
endif

View File

@@ -48,12 +48,12 @@ define GetSymbols
$(SED) -e 's/#.*//;s/global://;s/local://;s/\;//;s/^[ ]*/_/;/^_$$$$/d' | \
$(EGREP) -v "JNI_OnLoad|JNI_OnUnload|Agent_OnLoad|Agent_OnUnload|Agent_OnAttach" > \
$$(@D)/$$(basename $$(@F)).symbols || true; \
$(NM) $(NMFLAGS) $$($1_TARGET) | $(GREP) " T " | \
$(NM) $$($1_TARGET) | $(GREP) " T " | \
$(EGREP) "JNI_OnLoad|JNI_OnUnload|Agent_OnLoad|Agent_OnUnload|Agent_OnAttach" | \
$(CUT) -d ' ' -f 3 >> $$(@D)/$$(basename $$(@F)).symbols || true;\
else \
$(ECHO) "Getting symbols from nm"; \
$(NM) $(NMFLAGS) -m $$($1_TARGET) | $(GREP) "__TEXT" | \
$(NM) -m $$($1_TARGET) | $(GREP) "__TEXT" | \
$(EGREP) -v "non-external|private extern|__TEXT,__eh_frame" | \
$(SED) -e 's/.* //' > $$(@D)/$$(basename $$(@F)).symbols; \
fi
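The nm pipeline above can be run by hand on a finished library to list its exported JNI/Agent entry points; a sketch (the library path is illustrative):

    nm build/linux-x86_64-server-release/jdk/lib/libawt.so | grep " T " | \
        egrep "JNI_OnLoad|JNI_OnUnload|Agent_OnLoad|Agent_OnUnload|Agent_OnAttach" | \
        cut -d ' ' -f 3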
@@ -215,21 +215,7 @@ DEPENDENCY_TARGET_SED_PATTERN := \
# The fix-deps-file macro is used to adjust the contents of the generated make
# dependency files to contain paths compatible with make.
#
REWRITE_PATHS_RELATIVE = false
ifeq ($(ALLOW_ABSOLUTE_PATHS_IN_OUTPUT)-$(FILE_MACRO_CFLAGS), false-)
REWRITE_PATHS_RELATIVE = true
endif
# CCACHE_BASEDIR needs fix-deps-file as makefiles use absolute filenames for
# object files while CCACHE_BASEDIR will make ccache relativize all paths for
# its compiler. The compiler then produces relative dependency files.
# make does not know a relative and absolute filename is the same so it will
# ignore such dependencies.
ifneq ($(CCACHE), )
REWRITE_PATHS_RELATIVE = true
endif
ifeq ($(REWRITE_PATHS_RELATIVE), true)
# Need to handle -I flags as both '-Ifoo' and '-I foo'.
MakeCommandRelative = \
$(CD) $(WORKSPACE_ROOT) && \
@@ -359,25 +345,22 @@ define SetupCompileNativeFileBody
ifneq ($(DISABLE_WARNING_PREFIX), )
$1_WARNINGS_FLAGS := $$(addprefix $(DISABLE_WARNING_PREFIX), \
$$($$($1_BASE)_DISABLED_WARNINGS_$(TOOLCHAIN_TYPE)_$$($1_FILENAME)) \
$$($$($1_BASE)_DISABLED_WARNINGS_$(TOOLCHAIN_TYPE)_$(OPENJDK_TARGET_OS)_$$($1_FILENAME)))
$$($$($1_BASE)_DISABLED_WARNINGS_$(TOOLCHAIN_TYPE)_$$($1_FILENAME)))
endif
$1_BASE_CFLAGS := $$($$($1_BASE)_CFLAGS) $$($$($1_BASE)_EXTRA_CFLAGS) \
$$($$($1_BASE)_SYSROOT_CFLAGS)
$$($$($1_BASE)_SYSROOT_CFLAGS) $$($1_WARNINGS_FLAGS)
$1_BASE_CXXFLAGS := $$($$($1_BASE)_CXXFLAGS) $$($$($1_BASE)_EXTRA_CXXFLAGS) \
$$($$($1_BASE)_SYSROOT_CFLAGS) $$($1_EXTRA_CXXFLAGS)
$$($$($1_BASE)_SYSROOT_CFLAGS) $$($1_EXTRA_CXXFLAGS) $$($1_WARNINGS_FLAGS)
$1_BASE_ASFLAGS := $$($$($1_BASE)_ASFLAGS) $$($$($1_BASE)_EXTRA_ASFLAGS)
ifneq ($$(filter %.c, $$($1_FILENAME)), )
# Compile as a C file
$1_CFLAGS += $$($1_WARNINGS_FLAGS)
$1_FLAGS := $(CFLAGS_CCACHE) $$($1_USE_PCH_FLAGS) $$($1_BASE_CFLAGS) \
$$($1_OPT_CFLAGS) $$($1_CFLAGS) -c
$1_COMPILER := $$($$($1_BASE)_CC)
else ifneq ($$(filter %.m, $$($1_FILENAME)), )
# Compile as an Objective-C file
$1_CFLAGS += $$($1_WARNINGS_FLAGS)
$1_FLAGS := -x objective-c $(CFLAGS_CCACHE) $$($1_USE_PCH_FLAGS) \
$$($1_BASE_CFLAGS) $$($1_OPT_CFLAGS) $$($1_CFLAGS) -c
$1_COMPILER := $$($$($1_BASE)_CC)
@@ -401,7 +384,6 @@ define SetupCompileNativeFileBody
endif
else ifneq ($$(filter %.cpp %.cc %.mm, $$($1_FILENAME)), )
# Compile as a C++ or Objective-C++ file
$1_CXXFLAGS += $$($1_WARNINGS_FLAGS)
$1_FLAGS := $(CFLAGS_CCACHE) $$($1_USE_PCH_FLAGS) $$($1_BASE_CXXFLAGS) \
$$($1_OPT_CXXFLAGS) $$($1_CXXFLAGS) -c
$1_COMPILER := $$($$($1_BASE)_CXX)
@@ -503,19 +485,7 @@ endef
# CFLAGS the compiler flags to be used, used both for C and C++.
# CXXFLAGS the compiler flags to be used for c++, if set overrides CFLAGS.
# LDFLAGS the linker flags to be used, used both for C and C++.
# LDFLAGS_<toolchain> the linker flags to be used for the specified toolchain,
# used both for C and C++.
# LDFLAGS_<OS> the linker flags to be used for the specified target OS,
# used both for C and C++.
# LDFLAGS_<toolchain>_<OS> the linker flags to be used for the specified
# toolchain and target OS, used both for C and C++.
# LIBS the libraries to link to
# LIBS_<OS> the libraries to link to for the specified target OS,
# used both for C and C++.
# LIBS_<toolchain> the libraries to link to for the specified toolchain,
# used both for C and C++.
# LIBS_<OS>_<toolchain> the libraries to link to for the specified target
# OS and toolchain, used both for C and C++.
# ARFLAGS the archiver flags to be used
# OBJECT_DIR the directory where we store the object files
# OUTPUT_DIR the directory where the resulting binary is put
@@ -538,20 +508,10 @@ endef
# LD the linker to use, default is $(LD)
# OPTIMIZATION sets optimization level to NONE, LOW, HIGH, HIGHEST, HIGHEST_JVM, SIZE
# DISABLED_WARNINGS_<toolchain> Disable the given warnings for the specified toolchain
# DISABLED_WARNINGS_<toolchain>_<OS> Disable the given warnings for the specified
# toolchain and target OS
# DISABLED_WARNINGS_C_<toolchain> Disable the given warnings for the specified toolchain
# when compiling C code
# DISABLED_WARNINGS_C_<toolchain>_<OS> Disable the given warnings for the specified
# toolchain and target OS when compiling C code
# DISABLED_WARNINGS_CXX_<toolchain> Disable the given warnings for the specified
# toolchain when compiling C++ code
# DISABLED_WARNINGS_CXX_<toolchain>_<OS> Disable the given warnings for the specified
# toolchain and target OS when compiling C++ code
# DISABLED_WARNINGS_<toolchain>_<filename> Disable the given warnings for the specified
# toolchain when compiling the file specified by filename
# DISABLED_WARNINGS_<toolchain>_<OS>_<filename> Disable the given warnings for the specified
# toolchain and target OS when compiling the file specified by filename
# STRIP_SYMBOLS Set to false to override global strip policy and always leave
# symbols in the binary, if the toolchain allows for it
# DEBUG_SYMBOLS Set to false to disable generation of debug symbols
@@ -561,7 +521,6 @@ endef
# STRIPFLAGS Optionally change the flags given to the strip command
# PRECOMPILED_HEADER Header file to use as precompiled header
# PRECOMPILED_HEADER_EXCLUDE List of source files that should not use PCH
# BUILD_INFO_LOG_MACRO Overrides log level of the build info log message, default LogWarn
#
# After being called, some variables are exported from this macro, all prefixed
# with parameter 1 followed by a '_':
@@ -716,19 +675,6 @@ define SetupNativeCompilationBody
$$(error No sources found for $1 when looking inside the dirs $$($1_SRC))
endif
ifeq ($$($1_TYPE), EXECUTABLE)
ifeq ($(UBSAN_ENABLED), true)
# We need to set the default options for UBSan. This needs to be included in every executable.
# Rather than copy and paste code to everything with a main function, we add an additional
# source file to every executable that exports __ubsan_default_options.
ifneq ($$(filter %.cpp %.cc, $$($1_SRCS)), )
$1_SRCS += $(TOPDIR)/make/data/ubsan/ubsan_default_options.cpp
else
$1_SRCS += $(TOPDIR)/make/data/ubsan/ubsan_default_options.c
endif
endif
endif
# Calculate the expected output from compiling the sources
$1_EXPECTED_OBJS_FILENAMES := $$(call replace_with_obj_extension, $$(notdir $$($1_SRCS)))
$1_EXPECTED_OBJS := $$(addprefix $$($1_OBJECT_DIR)/, $$($1_EXPECTED_OBJS_FILENAMES))
@@ -741,12 +687,10 @@ define SetupNativeCompilationBody
# Sort to remove duplicates and provide a reproducible order on the input files to the linker.
$1_ALL_OBJS := $$(sort $$($1_EXPECTED_OBJS) $$($1_EXTRA_OBJECT_FILES))
# Pickup extra OPENJDK_TARGET_OS_TYPE, OPENJDK_TARGET_OS, TOOLCHAIN_TYPE and
# OPENJDK_TARGET_OS plus OPENJDK_TARGET_CPU pair dependent variables for CFLAGS.
# Pickup extra OPENJDK_TARGET_OS_TYPE, OPENJDK_TARGET_OS, and/or OPENJDK_TARGET_OS plus
# OPENJDK_TARGET_CPU pair dependent variables for CFLAGS.
$1_EXTRA_CFLAGS := $$($1_CFLAGS_$(OPENJDK_TARGET_OS_TYPE)) $$($1_CFLAGS_$(OPENJDK_TARGET_OS)) \
$$($1_CFLAGS_$(TOOLCHAIN_TYPE)) \
$$($1_CFLAGS_$(OPENJDK_TARGET_OS)_$(OPENJDK_TARGET_CPU))
ifneq ($(DEBUG_LEVEL), release)
# Pickup extra debug dependent variables for CFLAGS
$1_EXTRA_CFLAGS += $$($1_CFLAGS_debug)
@@ -763,11 +707,8 @@ define SetupNativeCompilationBody
$1_EXTRA_CFLAGS += $$(STATIC_LIBS_CFLAGS)
endif
# Pickup extra OPENJDK_TARGET_OS_TYPE, OPENJDK_TARGET_OS and/or TOOLCHAIN_TYPE
# dependent variables for CXXFLAGS.
$1_EXTRA_CXXFLAGS := $$($1_CXXFLAGS_$(OPENJDK_TARGET_OS_TYPE)) $$($1_CXXFLAGS_$(OPENJDK_TARGET_OS)) \
$$($1_CXXFLAGS_$(TOOLCHAIN_TYPE))
# Pickup extra OPENJDK_TARGET_OS_TYPE and/or OPENJDK_TARGET_OS dependent variables for CXXFLAGS.
$1_EXTRA_CXXFLAGS := $$($1_CXXFLAGS_$(OPENJDK_TARGET_OS_TYPE)) $$($1_CXXFLAGS_$(OPENJDK_TARGET_OS))
ifneq ($(DEBUG_LEVEL), release)
# Pickup extra debug dependent variables for CXXFLAGS
$1_EXTRA_CXXFLAGS += $$($1_CXXFLAGS_debug)
@@ -810,16 +751,12 @@ define SetupNativeCompilationBody
$$(DISABLED_WARNINGS) \
$$(DISABLED_WARNINGS_C) \
$$($1_DISABLED_WARNINGS_$(TOOLCHAIN_TYPE)) \
$$($1_DISABLED_WARNINGS_C_$(TOOLCHAIN_TYPE)) \
$$($1_DISABLED_WARNINGS_$(TOOLCHAIN_TYPE)_$(OPENJDK_TARGET_OS)) \
$$($1_DISABLED_WARNINGS_C_$(TOOLCHAIN_TYPE)_$(OPENJDK_TARGET_OS)))
$$($1_DISABLED_WARNINGS_C_$(TOOLCHAIN_TYPE)))
$1_EXTRA_CXXFLAGS += $$(addprefix $(DISABLE_WARNING_PREFIX), \
$$(DISABLED_WARNINGS) \
$$(DISABLED_WARNINGS_CXX) \
$$($1_DISABLED_WARNINGS_$(TOOLCHAIN_TYPE)) \
$$($1_DISABLED_WARNINGS_CXX_$(TOOLCHAIN_TYPE)) \
$$($1_DISABLED_WARNINGS_$(TOOLCHAIN_TYPE)_$(OPENJDK_TARGET_OS)) \
$$($1_DISABLED_WARNINGS_CXX_$(TOOLCHAIN_TYPE)_$(OPENJDK_TARGET_OS)))
$$($1_DISABLED_WARNINGS_CXX_$(TOOLCHAIN_TYPE)))
endif
# Check if warnings should be considered errors.
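For context on the addprefix calls above (not part of the diff): DISABLE_WARNING_PREFIX is the toolchain's prefix for suppressing a named warning (for gcc and clang it is -Wno-), so each space-separated warning name in the DISABLED_WARNINGS variables becomes one suppression flag. A small sketch with hypothetical values:

# Hypothetical values, mirroring the expansion pattern above.
DISABLE_WARNING_PREFIX := -Wno-
DEMO_DISABLED_WARNINGS := unused-variable sign-compare
DEMO_FLAGS := $(addprefix $(DISABLE_WARNING_PREFIX), $(DEMO_DISABLED_WARNINGS))
# DEMO_FLAGS expands to: -Wno-unused-variable -Wno-sign-compare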
@@ -946,18 +883,12 @@ define SetupNativeCompilationBody
# Setup rule for printing progress info when compiling source files.
# This is a rough heuristic and may not always print accurate information.
# The $1_BUILD_INFO and $1_BUILD_INFO_DEPS variables are used in
# TestFilesCompilation.gmk.
$$(call SetIfEmpty, $1_BUILD_INFO_LOG_MACRO, LogWarn)
$1_BUILD_INFO_DEPS := $$($1_SRCS) $$($1_COMPILE_VARDEPS_FILE)
$$($1_BUILD_INFO): $$($1_BUILD_INFO_DEPS)
$$($1_BUILD_INFO): $$($1_SRCS) $$($1_COMPILE_VARDEPS_FILE)
ifeq ($$(wildcard $$($1_TARGET)), )
$$(call $$($1_BUILD_INFO_LOG_MACRO), \
Creating $$(subst $$(OUTPUTDIR)/,,$$($1_TARGET)) from $$(words \
$$(call LogWarn, Creating $$(subst $$(OUTPUTDIR)/,,$$($1_TARGET)) from $$(words \
$$(filter-out %.vardeps, $$?)) file(s))
else
$$(call $$($1_BUILD_INFO_LOG_MACRO), \
$$(strip Updating $$(subst $$(OUTPUTDIR)/,,$$($1_TARGET)) \
$$(call LogWarn, $$(strip Updating $$(subst $$(OUTPUTDIR)/,,$$($1_TARGET)) \
$$(if $$(filter-out %.vardeps, $$?), \
due to $$(words $$(filter-out %.vardeps, $$?)) file(s), \
$$(if $$(filter %.vardeps, $$?), due to makefile changes))))
@@ -1036,15 +967,10 @@ define SetupNativeCompilationBody
$1_REAL_MAPFILE := $$($1_MAPFILE)
endif
# Pickup extra OPENJDK_TARGET_OS_TYPE, OPENJDK_TARGET_OS and TOOLCHAIN_TYPE
# dependent variables for LDFLAGS and LIBS, and additionally the pair dependent
# TOOLCHAIN_TYPE plus OPENJDK_TARGET_OS for LDFLAGS, or OPENJDK_TARGET_OS plus
# TOOLCHAIN_TYPE for LIBS
$1_EXTRA_LDFLAGS += $$($1_LDFLAGS_$(OPENJDK_TARGET_OS_TYPE)) $$($1_LDFLAGS_$(OPENJDK_TARGET_OS)) \
$$($1_LDFLAGS_$(TOOLCHAIN_TYPE)) $$($1_LDFLAGS_$(TOOLCHAIN_TYPE)_$(OPENJDK_TARGET_OS))
$1_EXTRA_LIBS += $$($1_LIBS_$(OPENJDK_TARGET_OS_TYPE)) $$($1_LIBS_$(OPENJDK_TARGET_OS)) \
$$($1_LIBS_$(OPENJDK_TARGET_OS)_$(TOOLCHAIN_TYPE)) $$($1_LIBS_$(TOOLCHAIN_TYPE))
# Pickup extra OPENJDK_TARGET_OS_TYPE and/or OPENJDK_TARGET_OS dependent variables
# for LDFLAGS and LIBS
$1_EXTRA_LDFLAGS += $$($1_LDFLAGS_$(OPENJDK_TARGET_OS_TYPE)) $$($1_LDFLAGS_$(OPENJDK_TARGET_OS))
$1_EXTRA_LIBS += $$($1_LIBS_$(OPENJDK_TARGET_OS_TYPE)) $$($1_LIBS_$(OPENJDK_TARGET_OS))
ifneq ($$($1_REAL_MAPFILE), )
$1_EXTRA_LDFLAGS += $(call SET_SHARED_LIBRARY_MAPFILE,$$($1_REAL_MAPFILE))
endif
@@ -1064,24 +990,21 @@ define SetupNativeCompilationBody
ifneq ($$($1_TYPE), STATIC_LIBRARY)
# Generate debuginfo files.
ifeq ($(call isTargetOs, windows), true)
$1_EXTRA_LDFLAGS += -debug "-pdb:$$($1_SYMBOLS_DIR)/$$($1_BASENAME).pdb" \
"-map:$$($1_SYMBOLS_DIR)/$$($1_BASENAME).map"
$1_EXTRA_LDFLAGS += -debug "-pdb:$$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).pdb" \
"-map:$$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).map"
ifeq ($(SHIP_DEBUG_SYMBOLS), public)
$1_EXTRA_LDFLAGS += "-pdbstripped:$$($1_SYMBOLS_DIR)/$$($1_BASENAME).stripped.pdb"
$1_EXTRA_LDFLAGS += "-pdbstripped:$$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).stripped.pdb"
endif
$1_DEBUGINFO_FILES := $$($1_SYMBOLS_DIR)/$$($1_BASENAME).pdb \
$$($1_SYMBOLS_DIR)/$$($1_BASENAME).map
$1_DEBUGINFO_FILES := $$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).pdb \
$$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).map
else ifeq ($(call isTargetOs, linux), true)
$1_DEBUGINFO_FILES := $$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).debuginfo
# Setup the command line creating debuginfo files, to be run after linking.
# It cannot be run separately since it updates the original target file
# Creating the debuglink is done in another command rather than all at once
# so we can run it after strip is called, since strip can sometimes mangle the
# embedded debuglink, which we want to avoid.
$1_CREATE_DEBUGINFO_CMDS := \
$$($1_OBJCOPY) --only-keep-debug $$($1_TARGET) $$($1_DEBUGINFO_FILES) $$(NEWLINE)
$1_CREATE_DEBUGLINK_CMDS := $(CD) $$($1_SYMBOLS_DIR) && \
$$($1_OBJCOPY) --only-keep-debug $$($1_TARGET) $$($1_DEBUGINFO_FILES) $$(NEWLINE) \
$(CD) $$($1_SYMBOLS_DIR) && \
$$($1_OBJCOPY) --add-gnu-debuglink=$$($1_DEBUGINFO_FILES) $$($1_TARGET)
else ifeq ($(call isTargetOs, aix), true)
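The comment above states the key constraint: --add-gnu-debuglink must run after strip, because strip can mangle an already-embedded debuglink. In the macro these commands are appended to the link recipe (they modify the just-linked target), but the ordering the comment describes can be sketched as a standalone rule (not part of the diff); the tool variables and file names are placeholders:

# Illustrative ordering only; $(OBJCOPY), $(STRIP) and the file names are placeholders.
libexample.debuginfo: libexample.so
	$(OBJCOPY) --only-keep-debug libexample.so libexample.debuginfo    # 1. extract debug info
	$(STRIP) -g libexample.so                                          # 2. strip the binary
	$(OBJCOPY) --add-gnu-debuglink=libexample.debuginfo libexample.so  # 3. add the link last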
@@ -1118,11 +1041,7 @@ define SetupNativeCompilationBody
$1 += $$($1_DEBUGINFO_FILES)
ifeq ($$($1_ZIP_EXTERNAL_DEBUG_SYMBOLS), true)
ifeq ($(call isTargetOs, windows), true)
$1_DEBUGINFO_ZIP := $$($1_SYMBOLS_DIR)/$$($1_BASENAME).diz
else
$1_DEBUGINFO_ZIP := $$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).diz
endif
$1 += $$($1_DEBUGINFO_ZIP)
# The dependency on TARGET is needed for debuginfo files
@@ -1215,7 +1134,7 @@ define SetupNativeCompilationBody
$1_VARDEPS := $$($1_LD) $$($1_SYSROOT_LDFLAGS) $$($1_LDFLAGS) $$($1_EXTRA_LDFLAGS) \
$$($1_LIBS) $$($1_EXTRA_LIBS) $$($1_MT) \
$$($1_CREATE_DEBUGINFO_CMDS) $$($1_MANIFEST_VERSION) \
$$($1_STRIP_CMD) $$($1_CREATE_DEBUGLINK_CMDS)
$$($1_STRIP_CMD)
$1_VARDEPS_FILE := $$(call DependOnVariable, $1_VARDEPS, \
$$($1_OBJECT_DIR)/$$($1_NOSUFFIX).vardeps)
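DependOnVariable, used above, is a build-system utility (defined elsewhere in the make tree) that writes a variable's value to a marker file, updates that file only when the value changes, and returns the file's path; listing the marker as a prerequisite makes the link rule re-run whenever any of the VARDEPS flags change. A hypothetical standalone use of the same pattern (not part of the diff):

# Hypothetical example: relink whenever the demo link flags change.
DEMO_VARDEPS := $(DEMO_LDFLAGS) $(DEMO_LIBS)
DEMO_VARDEPS_FILE := $(call DependOnVariable, DEMO_VARDEPS, \
    $(OUTPUTDIR)/demo/libdemo.vardeps)
$(OUTPUTDIR)/demo/libdemo.so: $(DEMO_OBJS) $(DEMO_VARDEPS_FILE)
	$(LINK_CMD)   # placeholder for the actual link command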
@@ -1278,7 +1197,6 @@ define SetupNativeCompilationBody
test "$$$$?" = "1" ; \
$$($1_CREATE_DEBUGINFO_CMDS)
$$($1_STRIP_CMD)
$$($1_CREATE_DEBUGLINK_CMDS)
ifeq ($(call isBuildOsEnv, windows.wsl2), true)
$$(CHMOD) +x $$($1_TARGET)
endif
@@ -1290,7 +1208,6 @@ define SetupNativeCompilationBody
$$($1_LIBS) $$($1_EXTRA_LIBS)) ; \
$$($1_CREATE_DEBUGINFO_CMDS)
$$($1_STRIP_CMD)
$$($1_CREATE_DEBUGLINK_CMDS)
endif
ifeq ($(call isTargetOs, windows), true)
ifneq ($$($1_MANIFEST), )



@@ -57,7 +57,7 @@ define ProcessMarkdown
endif
ifneq ($$($1_CSS), )
ifneq ($$(findstring https:/, $$($1_CSS)), )
ifneq ($$(findstring http:/, $$($1_CSS)), )
$1_$2_CSS_OPTION := --css '$$($1_CSS)'
else
$1_$2_CSS := $$(strip $$(call RelativePath, $$($1_CSS), $$($1_$2_TARGET_DIR)))
@@ -84,7 +84,7 @@ define ProcessMarkdown
$$(call MakeDir, $$(SUPPORT_OUTPUTDIR)/markdown $$(dir $$($1_$2_PANDOC_OUTPUT)))
$$(call ExecuteWithLog, $$(SUPPORT_OUTPUTDIR)/markdown/$$($1_$2_MARKER), \
$$(PANDOC) $$($1_OPTIONS) -f $$(PANDOC_MARKDOWN_FLAG) \
-t $$($1_FORMAT) --eol=lf --standalone \
-t $$($1_FORMAT) --standalone \
$$($1_$2_CSS_OPTION) $$($1_$2_OPTIONS_FROM_SRC) $$($1_$2_OPTIONS) \
'$$($1_$2_PANDOC_INPUT)' -o '$$($1_$2_PANDOC_OUTPUT)')
ifneq ($$(findstring $$(LOG_LEVEL), debug trace),)
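For reference (not part of the diff), the ExecuteWithLog call above ends up running a pandoc command roughly like the following; the input/output names and the CSS URL are illustrative, and the real options come from $1_OPTIONS and the per-target variables:

# Roughly the generated command; file names and the CSS URL are placeholders.
docs/example.html: docs/example.md
	pandoc --css 'https://example.org/style.css' -f markdown -t html --standalone \
	    docs/example.md -o docs/example.html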


@@ -45,7 +45,6 @@ include NativeCompilation.gmk
# SOURCE_DIRS A list of source directories to search
# OUTPUT_DIR Where to put the resulting files
# EXCLUDE A list of filenames to exclude from compilation
# EXTRA_FILES List of extra files not in SOURCE_DIRS
SetupTestFilesCompilation = $(NamedParamsMacroTemplate)
define SetupTestFilesCompilationBody
@@ -56,34 +55,22 @@ define SetupTestFilesCompilationBody
$$(error There are duplicate test file names for $1: $$($1_DUPLICATED_NAMES))
endif
# Always include common test functionality
TEST_CFLAGS := -I$(TOPDIR)/test/lib/native
ifeq ($(TOOLCHAIN_TYPE), gcc)
TEST_CFLAGS += -fvisibility=hidden
TEST_LDFLAGS += -Wl,--exclude-libs,ALL
else ifeq ($(TOOLCHAIN_TYPE), clang)
TEST_CFLAGS += -fvisibility=hidden
endif
# The list to depend on starts out empty
$1 :=
ifeq ($$($1_TYPE), LIBRARY)
$1_PREFIX = lib
$1_OUTPUT_SUBDIR := lib
$1_BASE_CFLAGS := $(CFLAGS_JDKLIB) $$(TEST_CFLAGS)
$1_BASE_CXXFLAGS := $(CXXFLAGS_JDKLIB) $$(TEST_CFLAGS)
$1_LDFLAGS := $(LDFLAGS_JDKLIB) $$(TEST_LDFLAGS) $$(call SET_SHARED_LIBRARY_ORIGIN)
$1_BASE_CFLAGS := $(CFLAGS_JDKLIB)
$1_BASE_CXXFLAGS := $(CXXFLAGS_JDKLIB)
$1_LDFLAGS := $(LDFLAGS_JDKLIB) $$(call SET_SHARED_LIBRARY_ORIGIN)
$1_COMPILATION_TYPE := LIBRARY
$1_LOG_TYPE := library
else ifeq ($$($1_TYPE), PROGRAM)
$1_PREFIX = exe
$1_OUTPUT_SUBDIR := bin
$1_BASE_CFLAGS := $(CFLAGS_JDKEXE) $$(TEST_CFLAGS)
$1_BASE_CXXFLAGS := $(CXXFLAGS_JDKEXE) $$(TEST_CFLAGS)
$1_LDFLAGS := $(LDFLAGS_JDKEXE) $$(TEST_LDFLAGS) $(LDFLAGS_TESTEXE)
$1_BASE_CFLAGS := $(CFLAGS_JDKEXE)
$1_BASE_CXXFLAGS := $(CXXFLAGS_JDKEXE)
$1_LDFLAGS := $(LDFLAGS_JDKEXE) $(LDFLAGS_TESTEXE)
$1_COMPILATION_TYPE := EXECUTABLE
$1_LOG_TYPE := executable
else
$$(error Unknown type: $$($1_TYPE))
endif
@@ -95,8 +82,6 @@ define SetupTestFilesCompilationBody
$1_EXCLUDE_PATTERN := $$(addprefix %/, $$($1_EXCLUDE))
$1_FILTERED_FILE_LIST := $$(filter-out $$($1_EXCLUDE_PATTERN), $$($1_FILE_LIST))
$1_BUILD_INFO := $$($1_OUTPUT_DIR)/_$1-build-info.marker
# Setup a compilation for each and every one of them
$$(foreach file, $$($1_FILTERED_FILE_LIST),\
$$(eval name := $$(strip $$(basename $$(notdir $$(file))))) \
@@ -104,7 +89,7 @@ define SetupTestFilesCompilationBody
$$(eval $$(call SetupNativeCompilation, BUILD_TEST_$$(name), \
NAME := $$(unprefixed_name), \
TYPE := $$($1_COMPILATION_TYPE), \
EXTRA_FILES := $$(file) $$($1_EXTRA_FILES), \
EXTRA_FILES := $$(file), \
OBJECT_DIR := $$($1_OUTPUT_DIR)/support/$$(name), \
OUTPUT_DIR := $$($1_OUTPUT_DIR)/$$($1_OUTPUT_SUBDIR), \
CFLAGS := $$($1_BASE_CFLAGS) $$($1_CFLAGS) $$($1_CFLAGS_$$(name)), \
@@ -118,21 +103,10 @@ define SetupTestFilesCompilationBody
OPTIMIZATION := $$(if $$($1_OPTIMIZATION_$$(name)),$$($1_OPTIMIZATION_$$(name)),LOW), \
COPY_DEBUG_SYMBOLS := false, \
STRIP_SYMBOLS := $$(if $$($1_STRIP_SYMBOLS_$$(name)),$$($1_STRIP_SYMBOLS_$$(name)),false), \
BUILD_INFO_LOG_MACRO := LogInfo, \
)) \
$$(eval $1 += $$(BUILD_TEST_$$(name)) ) \
$$(eval $1_BUILD_INFO_DEPS += $$(BUILD_TEST_$$(name)_BUILD_INFO_DEPS)) \
$$(eval $$(BUILD_TEST_$$(name)_BUILD_INFO): | $$($1_BUILD_INFO)) \
)
# Setup rule for printing a summary of all the tests being compiled. On Warn
# log level, this replaces the individual build info logging done by
# SetupNativeCompilation.
$$($1_BUILD_INFO): $$($1_BUILD_INFO_DEPS)
$$(call LogWarn, $$(strip Creating $$(words $$(filter-out %.vardeps, $$?)) \
test $$($1_LOG_TYPE) file(s) for $1))
$(TOUCH) $$@
endef
endif # _TEST_FILES_COMPILATION_GMK
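A hypothetical invocation of this macro (not part of the diff), using only parameters listed in the header comment; the setup name and directories are made up:

# Hypothetical usage; the setup name and directories are placeholders.
$(eval $(call SetupTestFilesCompilation, BUILD_DEMO_TEST_LIBRARIES, \
    TYPE := LIBRARY, \
    SOURCE_DIRS := $(TOPDIR)/test/demo/native, \
    OUTPUT_DIR := $(OUTPUTDIR)/support/test/demo/native, \
    EXCLUDE := broken_example.c, \
))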


@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2023, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -130,6 +130,7 @@ define SetupBuildLauncherBody
$$(shell $(FIND) $(SUPPORT_OUTPUTDIR)/modules_libs/java.base -name "*.a") \
$(SUPPORT_OUTPUTDIR)/modules_libs/jdk.jdwp.agent/libdt_socket.a \
$(SUPPORT_OUTPUTDIR)/modules_libs/jdk.jdwp.agent/libjdwp.a \
$(SUPPORT_OUTPUTDIR)/native/java.base/$(LIBRARY_PREFIX)fdlibm$(STATIC_LIBRARY_SUFFIX) \
-framework CoreFoundation \
-framework Foundation \
-framework SystemConfiguration \
@@ -143,19 +144,9 @@ define SetupBuildLauncherBody
$1_WINDOWS_JLI_LIB := $(call FindStaticLib, java.base, jli, /libjli)
$1_EXTRA_FILES := $(LAUNCHER_SRC)/main.c
ifeq ($(ASAN_ENABLED), true)
$1_EXTRA_FILES += $(TOPDIR)/make/data/asan/asan_default_options.c
endif
ifeq ($(LSAN_ENABLED), true)
$1_EXTRA_FILES += $(TOPDIR)/make/data/lsan/lsan_default_options.c
endif
$$(eval $$(call SetupJdkExecutable, BUILD_LAUNCHER_$1, \
NAME := $1, \
EXTRA_FILES := $$($1_EXTRA_FILES), \
EXTRA_FILES := $(LAUNCHER_SRC)/main.c, \
OPTIMIZATION := $$($1_OPTIMIZATION), \
CFLAGS := $$(CFLAGS_JDKEXE) \
$$(LAUNCHER_CFLAGS) \


@@ -41,12 +41,15 @@ ifeq ($(TOOLCHAIN_TYPE), gcc)
CFLAGS_JDKLIB += -fvisibility=hidden
CXXFLAGS_JDKLIB += -fvisibility=hidden
LDFLAGS_JDKLIB += -Wl,--exclude-libs,ALL
EXPORT_ALL_SYMBOLS := -fvisibility=default
else ifeq ($(TOOLCHAIN_TYPE), clang)
CFLAGS_JDKLIB += -fvisibility=hidden
CXXFLAGS_JDKLIB += -fvisibility=hidden
EXPORT_ALL_SYMBOLS := -fvisibility=default
else ifeq ($(TOOLCHAIN_TYPE), xlc)
CFLAGS_JDKLIB += -qvisibility=hidden
CXXFLAGS_JDKLIB += -qvisibility=hidden
EXPORT_ALL_SYMBOLS := -qvisibility=default
endif
# Put the libraries here.


@@ -42,6 +42,7 @@ DOCS_MODULES= \
jdk.hotspot.agent \
jdk.httpserver \
jdk.jpackage \
jdk.incubator.concurrent \
jdk.incubator.vector \
jdk.jartool \
jdk.javadoc \


@@ -1,5 +1,5 @@
#
# Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -25,21 +25,17 @@
# Versions and download locations for dependencies used by GitHub Actions (GHA)
GTEST_VERSION=1.14.0
JTREG_VERSION=7.3.1+1
GTEST_VERSION=1.8.1
JTREG_VERSION=7+1
LINUX_X64_BOOT_JDK_EXT=tar.gz
LINUX_X64_BOOT_JDK_URL=https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.6%2B7/OpenJDK21U-jdk_x64_linux_hotspot_21.0.6_7.tar.gz
LINUX_X64_BOOT_JDK_SHA256=a2650fba422283fbed20d936ce5d2a52906a5414ec17b2f7676dddb87201dbae
MACOS_AARCH64_BOOT_JDK_EXT=tar.gz
MACOS_AARCH64_BOOT_JDK_URL=https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.6%2B7/OpenJDK21U-jdk_aarch64_mac_hotspot_21.0.6_7.tar.gz
MACOS_AARCH64_BOOT_JDK_SHA256=4ef4083919126a3d93e603284b405c7493905497485a92b375f5d6c3e8f7e8f2
LINUX_X64_BOOT_JDK_URL=https://download.java.net/java/GA/jdk19/877d6127e982470ba2a7faa31cc93d04/36/GPL/openjdk-19_linux-x64_bin.tar.gz
LINUX_X64_BOOT_JDK_SHA256=f47aba585cfc9ecff1ed8e023524e8309f4315ed8b80100b40c7dcc232c12f96
MACOS_X64_BOOT_JDK_EXT=tar.gz
MACOS_X64_BOOT_JDK_URL=https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.6%2B7/OpenJDK21U-jdk_x64_mac_hotspot_21.0.6_7.tar.gz
MACOS_X64_BOOT_JDK_SHA256=7aacfc400078ad65b7c7de3ec75ff74bf5c2077d6740b350f85ae10be4f71e76
MACOS_X64_BOOT_JDK_URL=https://download.java.net/java/GA/jdk19/877d6127e982470ba2a7faa31cc93d04/36/GPL/openjdk-19_macos-x64_bin.tar.gz
MACOS_X64_BOOT_JDK_SHA256=bfd33f5b2590fd552ae2d9231340c6b4704a872f927dce1c52860b78c49a5a11
WINDOWS_X64_BOOT_JDK_EXT=zip
WINDOWS_X64_BOOT_JDK_URL=https://github.com/adoptium/temurin21-binaries/releases/download/jdk-21.0.6%2B7/OpenJDK21U-jdk_x64_windows_hotspot_21.0.6_7.zip
WINDOWS_X64_BOOT_JDK_SHA256=897c8eebb0f85a99ccecbd482ebae9a45d88c19d6077054f6529ebab49b6d259
WINDOWS_X64_BOOT_JDK_URL=https://download.java.net/java/GA/jdk19/877d6127e982470ba2a7faa31cc93d04/36/GPL/openjdk-19_windows-x64_bin.zip
WINDOWS_X64_BOOT_JDK_SHA256=8fabcee7c4e8d3b53486777ecd27bb906d67d7c1efd1bf22a8290cf659afa487


@@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -67,7 +67,6 @@
* input.build_osenv
* input.build_osenv_cpu
* input.build_osenv_platform
* input.build_osenv_version
*
* For more complex nested attributes, there is a method "get":
*
@@ -242,15 +241,14 @@ var getJibProfilesCommon = function (input, data) {
common.main_profile_names = [
"linux-x64", "linux-x86", "macosx-x64", "macosx-aarch64",
"windows-x64", "windows-x86", "windows-aarch64",
"linux-aarch64", "linux-arm32", "linux-ppc64le", "linux-s390x",
"linux-riscv64"
"linux-aarch64", "linux-arm32", "linux-ppc64le", "linux-s390x"
];
// These are the base settings for all the main build profiles.
common.main_profile_base = {
dependencies: ["boot_jdk", "gnumake", "jtreg", "jib", "autoconf", "jmh", "jcov"],
default_make_targets: ["product-bundles", "test-bundles", "static-libs-bundles"],
configure_args: concat(
configure_args: concat("--enable-jtreg-failure-handler",
"--with-exclude-translations=es,fr,it,ko,pt_BR,sv,ca,tr,cs,sk,ja_JP_A,ja_JP_HA,ja_JP_HI,ja_JP_I,zh_TW,zh_HK",
"--disable-manpages",
"--disable-jvm-feature-shenandoahgc",
@@ -390,7 +388,7 @@ var getJibProfilesCommon = function (input, data) {
};
};
common.boot_jdk_version = "20";
common.boot_jdk_version = "19";
common.boot_jdk_build_number = "36";
common.boot_jdk_home = input.get("boot_jdk", "install_path") + "/jdk-"
+ common.boot_jdk_version
@@ -463,8 +461,7 @@ var getJibProfilesProfiles = function (input, common, data) {
target_cpu: "x86",
build_cpu: "x64",
dependencies: ["devkit", "gtest"],
configure_args: concat(common.configure_args_32bit,
"--enable-deprecated-ports"),
configure_args: concat(common.configure_args_32bit),
},
"windows-aarch64": {
@@ -521,17 +518,6 @@ var getJibProfilesProfiles = function (input, common, data) {
"--disable-warnings-as-errors"
],
},
"linux-riscv64": {
target_os: "linux",
target_cpu: "riscv64",
build_cpu: "x64",
dependencies: ["devkit", "gtest", "build_devkit"],
configure_args: [
"--openjdk-target=riscv64-linux-gnu", "--with-freetype=bundled",
"--disable-warnings-as-errors"
],
},
};
// Add the base settings to all the main profiles
@@ -588,23 +574,21 @@ var getJibProfilesProfiles = function (input, common, data) {
"linux-x64-zero": {
target_os: "linux",
target_cpu: "x64",
dependencies: ["devkit", "gtest", "libffi"],
dependencies: ["devkit", "gtest"],
configure_args: concat(common.configure_args_64bit, [
"--with-zlib=system",
"--with-jvm-variants=zero",
"--with-libffi=" + input.get("libffi", "home_path"),
"--enable-libffi-bundling",
"--enable-libffi-bundling"
])
},
"linux-aarch64-zero": {
target_os: "linux",
target_cpu: "aarch64",
dependencies: ["devkit", "gtest", "libffi"],
dependencies: ["devkit", "gtest"],
configure_args: concat(common.configure_args_64bit, [
"--with-zlib=system",
"--with-jvm-variants=zero",
"--with-libffi=" + input.get("libffi", "home_path"),
"--enable-libffi-bundling"
])
},
@@ -613,11 +597,10 @@ var getJibProfilesProfiles = function (input, common, data) {
target_os: "linux",
target_cpu: "x86",
build_cpu: "x64",
dependencies: ["devkit", "gtest", "libffi"],
dependencies: ["devkit", "gtest"],
configure_args: concat(common.configure_args_32bit, [
"--with-zlib=system",
"--with-jvm-variants=zero",
"--with-libffi=" + input.get("libffi", "home_path"),
"--enable-libffi-bundling"
])
}
@@ -730,10 +713,7 @@ var getJibProfilesProfiles = function (input, common, data) {
},
"linux-s390x": {
platform: "linux-s390x",
},
"linux-riscv64": {
platform: "linux-riscv64",
},
}
}
// Generate common artifacts for all main profiles
Object.keys(artifactData).forEach(function (name) {
@@ -748,40 +728,6 @@ var getJibProfilesProfiles = function (input, common, data) {
common.debug_profile_artifacts(artifactData[name]));
});
// Define artifact just for linux-x64-zero, which is the only one we test on
["linux-x64"].forEach(function (name) {
var o = artifactData[name]
var pf = o.platform
var jdk_subdir = (o.jdk_subdir != null ? o.jdk_subdir : "jdk-" + data.version);
var jdk_suffix = (o.jdk_suffix != null ? o.jdk_suffix : "tar.gz");
var zeroName = name + "-zero";
profiles[zeroName].artifacts = {
jdk: {
local: "bundles/\\(jdk.*bin." + jdk_suffix + "\\)",
remote: [
"bundles/" + pf + "/jdk-" + data.version + "_" + pf + "_bin-zero." + jdk_suffix,
],
subdir: jdk_subdir,
exploded: "images/jdk",
},
test: {
local: "bundles/\\(jdk.*bin-tests.tar.gz\\)",
remote: [
"bundles/" + pf + "/jdk-" + data.version + "_" + pf + "_bin-zero-tests.tar.gz",
],
exploded: "images/test"
},
jdk_symbols: {
local: "bundles/\\(jdk.*bin-symbols.tar.gz\\)",
remote: [
"bundles/" + pf + "/jdk-" + data.version + "_" + pf + "_bin-zero-symbols.tar.gz",
],
subdir: jdk_subdir,
exploded: "images/jdk"
},
};
});
buildJdkDep = input.build_os + "-" + input.build_cpu + ".jdk";
docsProfiles = {
"docs": {
@@ -851,6 +797,24 @@ var getJibProfilesProfiles = function (input, common, data) {
}
});
// Define the reference implementation profiles. These are basically the same
// as the open profiles, but upload artifacts to a different location.
common.main_profile_names.forEach(function (name) {
var riName = name + "-ri";
var riDebugName = riName + common.debug_suffix;
var openName = name + common.open_suffix;
var openDebugName = openName + common.debug_suffix;
profiles[riName] = clone(profiles[openName]);
profiles[riDebugName] = clone(profiles[openDebugName]);
// Rewrite all remote dirs to "bundles/openjdk/BCL/..."
for (artifactName in profiles[riName].artifacts) {
var artifact = profiles[riName].artifacts[artifactName];
artifact.remote = replaceAll(
"\/GPL\/", "/BCL/",
(artifact.remote != null ? artifact.remote : artifact.local));
}
});
// For open profiles, the non-debug jdk bundles, need an "open" prefix on the
// remote bundle names, forming the word "openjdk". See JDK-8188789.
common.main_profile_names.forEach(function (name) {
@@ -889,7 +853,7 @@ var getJibProfilesProfiles = function (input, common, data) {
[ "linux-aarch64", "linux-x64", "macosx-x64", "macosx-aarch64", "windows-x64" ]
.forEach(function (name) {
var o = artifactData[name]
var jdk_subdir = "jdk-" + data.version;
var jdk_subdir = (o.jdk_subdir != null ? o.jdk_subdir : "jdk-" + data.version);
var jdk_suffix = (o.jdk_suffix != null ? o.jdk_suffix : "tar.gz");
var pf = o.platform
var jcovName = name + "-jcov";
@@ -945,7 +909,10 @@ var getJibProfilesProfiles = function (input, common, data) {
target_os: input.build_os,
target_cpu: input.build_cpu,
dependencies: [ "jtreg", "gnumake", "boot_jdk", "devkit", "jib" ],
labels: "test"
labels: "test",
environment: {
"JT_JAVA": common.boot_jdk_home
}
}
};
profiles = concatObjects(profiles, testOnlyProfiles);
@@ -976,7 +943,7 @@ var getJibProfilesProfiles = function (input, common, data) {
target_cpu: input.build_cpu,
dependencies: [
"jtreg", "gnumake", "boot_jdk", "devkit", "jib", "jcov", testedProfileJdk,
testedProfileTest,
testedProfileTest, testedProfile + ".jdk_symbols",
],
src: "src.conf",
make_args: testOnlyMake,
@@ -990,9 +957,6 @@ var getJibProfilesProfiles = function (input, common, data) {
labels: "test"
}
};
if (!testedProfile.endsWith("-jcov")) {
testOnlyProfilesPrebuilt["run-test-prebuilt"]["dependencies"].push(testedProfile + ".jdk_symbols");
}
// If actually running the run-test-prebuilt profile, verify that the input
// variable is valid and if so, add the appropriate target_* values from
@@ -1022,25 +986,11 @@ var getJibProfilesProfiles = function (input, common, data) {
dependencies: [ "lldb" ],
environment_path: [
input.get("gnumake", "install_path") + "/bin",
input.get("lldb", "install_path") + "/Xcode/Contents/Developer/usr/bin",
input.get("lldb", "install_path") + "/Xcode.app/Contents/Developer/usr/bin",
],
};
profiles["run-test"] = concatObjects(profiles["run-test"], macosxRunTestExtra);
profiles["run-test-prebuilt"] = concatObjects(profiles["run-test-prebuilt"], macosxRunTestExtra);
} else if (input.build_os == "windows") {
// On windows, add the devkit debugger to the path in all the run-test profiles
// to make them available to the jtreg failure handler.
var archDir = "x64";
if (input.build_arch == "aarch64") {
archDir = "arm64"
}
windowsRunTestExtra = {
environment_path: [
input.get("devkit", "install_path") + "/10/Debuggers/" + archDir
]
}
profiles["run-test"] = concatObjects(profiles["run-test"], windowsRunTestExtra);
profiles["run-test-prebuilt"] = concatObjects(profiles["run-test-prebuilt"], windowsRunTestExtra);
}
// The profile run-test-prebuilt defines src.conf as the src bundle. When
@@ -1081,13 +1031,12 @@ var getJibProfilesDependencies = function (input, common) {
var devkit_platform_revisions = {
linux_x64: "gcc11.2.0-OL6.4+1.0",
macosx: "Xcode12.4+1.1",
windows_x64: "VS2022-17.1.0+1.1",
linux_aarch64: input.build_cpu == "x64" ? "gcc11.2.0-OL7.6+1.1" : "gcc11.2.0-OL7.6+1.0",
macosx: "Xcode12.4+1.0",
windows_x64: "VS2022-17.1.0+1.0",
linux_aarch64: "gcc11.2.0-OL7.6+1.0",
linux_arm: "gcc8.2.0-Fedora27+1.0",
linux_ppc64le: "gcc8.2.0-Fedora27+1.0",
linux_s390x: "gcc8.2.0-Fedora27+1.0",
linux_riscv64: "gcc11.3.0-Fedora_rawhide_68692+1.1"
linux_s390x: "gcc8.2.0-Fedora27+1.0"
};
var devkit_platform = (input.target_cpu == "x86"
@@ -1128,23 +1077,20 @@ var getJibProfilesDependencies = function (input, common) {
environment_path: common.boot_jdk_home + "/bin"
}
var makeRevision = "4.0+1.0";
var makeBinSubDir = "/bin";
var makeModule = "gnumake-" + input.build_platform;
if (input.build_os == "windows") {
makeModule = "gnumake-" + input.build_osenv_platform;
if (input.build_osenv == "cygwin") {
var versionArray = input.build_osenv_version.split(/\./);
var majorVer = parseInt(versionArray[0]);
var minorVer = parseInt(versionArray[1]);
if (majorVer > 3 || (majorVer == 3 && minorVer >= 3)) {
makeRevision = "4.3+1.0";
var pandoc_version;
if (input.build_cpu == "aarch64") {
if (input.build_os == "macosx") {
pandoc_version = "2.14.0.2+1.0";
} else {
makeBinSubDir = "/cygwin/bin";
pandoc_version = "2.5+1.0";
}
} else {
pandoc_version = "2.3.1+1.0";
}
}
var makeBinDir = input.get("gnumake", "install_path") + makeBinSubDir;
var makeBinDir = (input.build_os == "windows"
? input.get("gnumake", "install_path") + "/cygwin/bin"
: input.get("gnumake", "install_path") + "/bin");
var dependencies = {
boot_jdk: boot_jdk,
@@ -1173,7 +1119,7 @@ var getJibProfilesDependencies = function (input, common) {
organization: common.organization,
ext: "tar.gz",
module: "devkit-macosx" + (input.build_cpu == "x64" ? "_x64" : ""),
revision: (input.build_cpu == "x64" ? "Xcode11.3.1-MacOSX10.15+1.2" : devkit_platform_revisions[devkit_platform])
revision: (input.build_cpu == "x64" ? "Xcode11.3.1-MacOSX10.15+1.1" : devkit_platform_revisions[devkit_platform])
},
cups: {
@@ -1185,9 +1131,9 @@ var getJibProfilesDependencies = function (input, common) {
jtreg: {
server: "jpg",
product: "jtreg",
version: "7.3.1",
version: "7",
build_number: "1",
file: "bundles/jtreg-7.3.1+1.zip",
file: "bundles/jtreg-7+1.zip",
environment_name: "JT_HOME",
environment_path: input.get("jtreg", "home_path") + "/bin",
configure_args: "--with-jtreg=" + input.get("jtreg", "home_path"),
@@ -1201,7 +1147,7 @@ var getJibProfilesDependencies = function (input, common) {
jcov: {
organization: common.organization,
revision: "3.0-14-jdk-asm+1.0",
revision: "3.0-13-jdk-asm+1.0",
ext: "zip",
environment_name: "JCOV_HOME",
},
@@ -1209,12 +1155,18 @@ var getJibProfilesDependencies = function (input, common) {
gnumake: {
organization: common.organization,
ext: "tar.gz",
revision: makeRevision,
module: makeModule,
revision: "4.0+1.0",
module: (input.build_os == "windows"
? "gnumake-" + input.build_osenv_platform
: "gnumake-" + input.build_platform),
configure_args: "MAKE=" + makeBinDir + "/make",
environment: {
"MAKE": makeBinDir + "/make"
},
environment_path: makeBinDir
},
@@ -1241,7 +1193,7 @@ var getJibProfilesDependencies = function (input, common) {
pandoc: {
organization: common.organization,
ext: "tar.gz",
revision: "2.19.2+1.0",
revision: pandoc_version,
module: "pandoc-" + input.build_platform,
configure_args: "PANDOC=" + input.get("pandoc", "install_path") + "/pandoc/pandoc",
environment_path: input.get("pandoc", "install_path") + "/pandoc"
@@ -1267,14 +1219,7 @@ var getJibProfilesDependencies = function (input, common) {
gtest: {
organization: common.organization,
ext: "tar.gz",
revision: "1.14.0+1.0"
},
libffi: {
organization: common.organization,
module: "libffi-" + input.target_platform,
ext: "tar.gz",
revision: "3.4.2+1.0"
revision: "1.8.1"
},
};


@@ -43,6 +43,7 @@ BOOT_MODULES= \
java.rmi \
java.security.sasl \
java.xml \
jdk.incubator.concurrent \
jdk.incubator.vector \
jdk.internal.vm.ci \
jdk.jfr \

Some files were not shown because too many files have changed in this diff.