Compare commits

..

24 Commits

Author                SHA1        Message                                       Date
Vitaly Provodin       8052f7acac  updated JTreg exclude list                    2020-07-21 07:00:45 +03:00
Mikhail Grishchenko   b45480e58b  updated JTreg exclude list                    2020-07-21 07:00:44 +03:00
Vitaly Provodin       43d2007cc2  updated JTreg exclude list                    2020-07-21 07:00:44 +03:00
Vitaly Provodin       df33637256  updated JTreg exclude list                    2020-07-21 07:00:44 +03:00
Vitaly Provodin       d750a759c7  switch boot_jdk to 14.0.1                     2020-07-21 07:00:44 +03:00
Vitaly Provodin       aae0e57a6a  updated JTreg exclude list                    2020-07-21 07:00:44 +03:00
Vitaly Provodin       dbf038672a  restore actual docker file                    2020-07-21 07:00:44 +03:00
Vitaly Provodin       39c700c19e  remove the Nashorn JavaScript Engine          2020-07-21 07:00:44 +03:00
                                  (see http://openjdk.java.net/jeps/372,
                                  https://bugs.openjdk.java.net/browse/JDK-8236933 and
                                  https://bugs.openjdk.java.net/browse/JDK-8241749)
Vitaly Provodin       569f5e4b8f  updated JTreg exclude list                    2020-07-21 07:00:44 +03:00
Vitaly Provodin       f53114cd68  updated JTreg exclude list                    2020-07-21 07:00:43 +03:00
Vitaly Provodin       30aebf0e9c  updated JTreg exclude list                    2020-07-21 07:00:43 +03:00
Vitaly Provodin       0a89c53de1  updated JTreg exclude list                    2020-07-21 07:00:43 +03:00
Vitaly Provodin       1e9c169137  updated JTreg exclude list                    2020-07-21 07:00:43 +03:00
Vitaly Provodin       96287d8ce4  JBR-2212 add JBR building scripts             2020-07-21 07:00:43 +03:00
Vitaly Provodin       d8b1f76537  update modules.list from master               2020-07-21 07:00:43 +03:00
Alexey Ushakov        cd55b8d9c8  Updated Docker config to build jbr15          2020-07-21 07:00:43 +03:00
Vitaly Provodin       ea5dc3989a  JBR-2130: remove jdk.pack from JBR            2020-07-21 07:00:43 +03:00
Vitaly Provodin       e1054eac82  updated JTreg exclude list                    2020-07-21 07:00:42 +03:00
Vitaly Provodin       e06b4d1b47  updated JTreg exclude list                    2020-07-21 07:00:42 +03:00
Vitaly.Provodin       38b742c737  updated JTreg exclude list                    2020-07-21 07:00:42 +03:00
Anton Tarasov         0d00289d6b  Correct modules.list                          2020-07-21 07:00:42 +03:00
Anton Tarasov         185844040b  Add modules.list                              2020-07-21 07:00:42 +03:00
Vitaly Provodin       b671f1feee  add notarization scripts                      2020-07-21 07:00:42 +03:00
Alexey Ushakov        372ea1941c  Added Dockerfile for linux build environment  2020-07-21 07:00:42 +03:00
22359 changed files with 952831 additions and 1528248 deletions

1
.gitattributes vendored

@@ -1 +0,0 @@
* -text

File diff suppressed because it is too large.

4
.gitignore vendored

@@ -2,7 +2,6 @@
/dist/
/.idea/
/.vscode/
/nbproject/
nbproject/private/
/webrev
/.src-rev
@@ -15,6 +14,3 @@ test/nashorn/lib
NashornProfile.txt
**/JTreport/**
**/JTwork/**
/src/utils/LogCompilation/target/
/.project/
/.settings/

18
.hgignore Normal file

@@ -0,0 +1,18 @@
^build/
^dist/
^.idea/
^.vscode/
nbproject/private/
^webrev
^.src-rev$
^.jib/
(^|/)\.DS_Store
(^|/)\.metadata/
(^|/)\.recommenders/
test/nashorn/script/external
test/nashorn/lib
NashornProfile.txt
(^|/)JTreport/
(^|/)JTwork/
(^|/)\.git/
^src/utils/hsdis/build/

12
.hgtags

@@ -650,15 +650,3 @@ a32f58c6b8be81877411767de7ba9c4cf087c1b5 jdk-15+31
143e258f64af490010eb7e0bacc1cfaeceff0993 jdk-16+5
2dad000726b8d5db9f3df647fb4949d88f269dd4 jdk-15+32
4a8fd81d64bafa523cddb45f82805536edace106 jdk-16+6
6b65f4e7a975628df51ef755b02642075390041d jdk-15+33
c3a4a7ea7c304cabdacdc31741eb94c51351668d jdk-16+7
b0817631d2f4395508cb10e81c3858a94d9ae4de jdk-15+34
0a73d6f3aab48ff6d7e61e47f0bc2d87a054f217 jdk-16+8
fd60c3146a024037cdd9be34c645bb793995a7cc jdk-15+35
c075a286cc7df767cce28e8057d6ec5051786490 jdk-16+9
b01985b4f88f554f97901e53e1ba314681dd9c19 jdk-16+10
e3f940bd3c8fcdf4ca704c6eb1ac745d155859d5 jdk-15+36
5c18d696c7ce724ca36df13933aa53f50e12b9e0 jdk-16+11
fc8e62b399bd93d06e8d13dc3b384c450e853dcd jdk-16+12
fd07cdb26fc70243ef23d688b545514f4ddf1c2b jdk-16+13
36b29df125dc88f11657ce93b4998aa9ff5f5d41 jdk-16+14


@@ -1,34 +1,2 @@
[general]
project=jdk
jbs=JDK
[checks]
error=author,committer,reviewers,merge,issues,executable,symlink,message,hg-tag,whitespace,problemlists
[repository]
tags=(?:jdk-(?:[1-9]([0-9]*)(?:\.(?:0|[1-9][0-9]*)){0,4})(?:\+(?:(?:[0-9]+))|(?:-ga)))|(?:jdk[4-9](?:u\d{1,3})?-(?:(?:b\d{2,3})|(?:ga)))|(?:hs\d\d(?:\.\d{1,2})?-b\d\d)
branches=
[census]
version=0
domain=openjdk.org
[checks "whitespace"]
files=.*\.cpp|.*\.hpp|.*\.c|.*\.h|.*\.java|.*\.cc|.*\.hh|.*\.m|.*\.mm|.*\.gmk|.*\.m4|.*\.ac|Makefile
ignore-tabs=.*\.gmk|Makefile
[checks "merge"]
message=Merge
[checks "reviewers"]
reviewers=1
ignore=duke
[checks "committer"]
role=committer
[checks "issues"]
pattern=^([124-8][0-9]{6}): (\S.*)$
[checks "problemlists"]
dirs=test/jdk|test/langtools|test/lib-test|test/hotspot/jtreg|test/jaxp
bugids=dup


@@ -1,3 +0,0 @@
# Contributing to the JDK
Please see <https://openjdk.java.net/contribute/> for how to contribute.

12
README Normal file

@@ -0,0 +1,12 @@
Welcome to the JDK!
===================
For information about building the JDK, including how to retrieve all
of the source code, please see either of these files:
* doc/building.html (html version)
* doc/building.md (markdown version)
See http://openjdk.java.net/ for more information about the OpenJDK
Community and the JDK.


@@ -1,11 +0,0 @@
# Welcome to the JDK!
For build instructions please see the
[online documentation](https://openjdk.java.net/groups/build/doc/building.html),
or either of these files:
- [doc/building.html](doc/building.html) (html version)
- [doc/building.md](doc/building.md) (markdown version)
See <https://openjdk.java.net/> for more information about
the OpenJDK Community and the JDK.


@@ -25,7 +25,7 @@
# Shell script for generating an IDEA project from a given list of modules
usage() {
echo "usage: $0 [-h|--help] [-v|--verbose] [-o|--output <path>] [-c|--conf <conf_name>] [modules]+"
echo "usage: $0 [-h|--help] [-v|--verbose] [-o|--output <path>] [modules]+"
exit 1
}
@@ -37,7 +37,6 @@ cd $TOP;
IDEA_OUTPUT=$TOP/.idea
VERBOSE="false"
CONF_ARG=
while [ $# -gt 0 ]
do
case $1 in
@@ -53,10 +52,6 @@ do
IDEA_OUTPUT=$2/.idea
shift
;;
-c | --conf )
CONF_ARG="CONF_NAME=$2"
shift
;;
-*) # bad option
usage
@@ -96,7 +91,7 @@ if [ "$VERBOSE" = "true" ] ; then
echo "idea template dir: $IDEA_TEMPLATE"
fi
cd $TOP ; make -f "$IDEA_MAKE/idea.gmk" -I $MAKE_DIR/.. idea MAKEOVERRIDES= OUT=$IDEA_OUTPUT/env.cfg MODULES="$*" $CONF_ARG || exit 1
cd $TOP ; make -f "$IDEA_MAKE/idea.gmk" -I $MAKE_DIR/.. idea MAKEOVERRIDES= OUT=$IDEA_OUTPUT/env.cfg MODULES="$*" || exit 1
cd $SCRIPT_DIR
. $IDEA_OUTPUT/env.cfg
@@ -152,32 +147,22 @@ add_replacement() {
add_replacement "###MODULE_NAMES###" "$MODULE_NAMES"
add_replacement "###VCS_TYPE###" "$VCS_TYPE"
SPEC_DIR=`dirname $SPEC`
if [ "x$CYGPATH" != "x" ]; then
add_replacement "###BUILD_DIR###" "`$CYGPATH -am $SPEC_DIR`"
add_replacement "###IMAGES_DIR###" "`$CYGPATH -am $SPEC_DIR`/images/jdk"
add_replacement "###ROOT_DIR###" "`$CYGPATH -am $TOPLEVEL_DIR`"
add_replacement "###IDEA_DIR###" "`$CYGPATH -am $IDEA_OUTPUT`"
if [ "x$JT_HOME" = "x" ]; then
add_replacement "###JTREG_HOME###" ""
else
add_replacement "###JTREG_HOME###" "`$CYGPATH -am $JT_HOME`"
fi
elif [ "x$WSL_DISTRO_NAME" != "x" ]; then
add_replacement "###BUILD_DIR###" "`wslpath -am $SPEC_DIR`"
add_replacement "###IMAGES_DIR###" "`wslpath -am $SPEC_DIR`/images/jdk"
add_replacement "###ROOT_DIR###" "`wslpath -am $TOPLEVEL_DIR`"
add_replacement "###IDEA_DIR###" "`wslpath -am $IDEA_OUTPUT`"
if [ "x$JT_HOME" = "x" ]; then
add_replacement "###JTREG_HOME###" ""
else
add_replacement "###JTREG_HOME###" "`wslpath -am $JT_HOME`"
fi
else
if [ "x$CYGPATH" = "x" ]; then
add_replacement "###BUILD_DIR###" "$SPEC_DIR"
add_replacement "###JTREG_HOME###" "$JT_HOME"
add_replacement "###IMAGES_DIR###" "$SPEC_DIR/images/jdk"
add_replacement "###ROOT_DIR###" "$TOPLEVEL_DIR"
add_replacement "###IDEA_DIR###" "$IDEA_OUTPUT"
else
add_replacement "###BUILD_DIR###" "`cygpath -am $SPEC_DIR`"
add_replacement "###IMAGES_DIR###" "`cygpath -am $SPEC_DIR`/images/jdk"
add_replacement "###ROOT_DIR###" "`cygpath -am $TOPLEVEL_DIR`"
add_replacement "###IDEA_DIR###" "`cygpath -am $IDEA_OUTPUT`"
if [ "x$JT_HOME" = "x" ]; then
add_replacement "###JTREG_HOME###" ""
else
add_replacement "###JTREG_HOME###" "`cygpath -am $JT_HOME`"
fi
fi
SOURCE_PREFIX="<sourceFolder url=\"file://"
@@ -185,22 +170,9 @@ SOURCE_POSTFIX="\" isTestSource=\"false\" />"
for root in $MODULE_ROOTS; do
if [ "x$CYGPATH" != "x" ]; then
root=`$CYGPATH -am $root`
elif [ "x$WSL_DISTRO_NAME" != "x" ]; then
root=`wslpath -am $root`
fi
VM_CI="jdk.internal.vm.ci/share/classes"
VM_COMPILER="src/jdk.internal.vm.compiler/share/classes"
if test "${root#*$VM_CI}" != "$root" || test "${root#*$VM_COMPILER}" != "$root"; then
for subdir in "$root"/*; do
if [ -d "$subdir" ]; then
SOURCES=$SOURCES" $SOURCE_PREFIX""$subdir"/src"$SOURCE_POSTFIX"
fi
done
else
SOURCES=$SOURCES" $SOURCE_PREFIX""$root""$SOURCE_POSTFIX"
root=`cygpath -am $root`
fi
SOURCES=$SOURCES" $SOURCE_PREFIX""$root""$SOURCE_POSTFIX"
done
add_replacement "###SOURCE_ROOTS###" "$SOURCES"
@@ -224,30 +196,16 @@ fi
CP=$ANT_HOME/lib/ant.jar
rm -rf $CLASSES; mkdir $CLASSES
if [ "x$CYGPATH" != "x" ] ; then ## CYGPATH may be set in env.cfg
JAVAC_SOURCE_FILE=`$CYGPATH -am $IDEA_OUTPUT/src/idea/IdeaLoggerWrapper.java`
JAVAC_SOURCE_PATH=`$CYGPATH -am $IDEA_OUTPUT/src`
JAVAC_CLASSES=`$CYGPATH -am $CLASSES`
JAVAC_CP=`$CYGPATH -am $CP`
JAVAC=javac
elif [ "x$WSL_DISTRO_NAME" != "x" ]; then
JAVAC_SOURCE_FILE=`realpath --relative-to=./ $IDEA_OUTPUT/src/idea/IdeaLoggerWrapper.java`
JAVAC_SOURCE_PATH=`realpath --relative-to=./ $IDEA_OUTPUT/src`
JAVAC_CLASSES=`realpath --relative-to=./ $CLASSES`
ANT_TEMP=`mktemp -d -p ./`
cp $ANT_HOME/lib/ant.jar $ANT_TEMP/ant.jar
JAVAC_CP=$ANT_TEMP/ant.jar
JAVAC=javac.exe
else
if [ "x$CYGPATH" = "x" ] ; then ## CYGPATH may be set in env.cfg
JAVAC_SOURCE_FILE=$IDEA_OUTPUT/src/idea/IdeaLoggerWrapper.java
JAVAC_SOURCE_PATH=$IDEA_OUTPUT/src
JAVAC_CLASSES=$CLASSES
JAVAC_CP=$CP
JAVAC=javac
else
JAVAC_SOURCE_FILE=`cygpath -am $IDEA_OUTPUT/src/idea/IdeaLoggerWrapper.java`
JAVAC_SOURCE_PATH=`cygpath -am $IDEA_OUTPUT/src`
JAVAC_CLASSES=`cygpath -am $CLASSES`
JAVAC_CP=`cygpath -am $CP`
fi
$BOOT_JDK/bin/$JAVAC -d $JAVAC_CLASSES -sourcepath $JAVAC_SOURCE_PATH -cp $JAVAC_CP $JAVAC_SOURCE_FILE
if [ "x$WSL_DISTRO_NAME" != "x" ]; then
rm -rf $ANT_TEMP
fi
$BOOT_JDK/bin/javac -d $JAVAC_CLASSES -sourcepath $JAVAC_SOURCE_PATH -cp $JAVAC_CP $JAVAC_SOURCE_FILE
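For orientation, a hypothetical invocation of the IDEA project-generation script diffed above, built from the usage string it prints; the script path, output directory and module name are illustrative placeholders, not values taken from this diff:

```
# usage (from the script): [-h|--help] [-v|--verbose] [-o|--output <path>] [modules]+
bash bin/idea.sh --verbose --output ~/jdk-idea java.base
```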


@@ -76,9 +76,8 @@
<li><a href="#specifying-the-target-platform">Specifying the Target Platform</a></li>
<li><a href="#toolchain-considerations">Toolchain Considerations</a></li>
<li><a href="#native-libraries">Native Libraries</a></li>
<li><a href="#cross-compiling-with-debian-sysroots">Cross compiling with Debian sysroots</a></li>
<li><a href="#creating-and-using-sysroots-with-qemu-deboostrap">Creating And Using Sysroots With qemu-deboostrap</a></li>
<li><a href="#building-for-armaarch64">Building for ARM/aarch64</a></li>
<li><a href="#building-for-musl">Building for musl</a></li>
<li><a href="#verifying-the-build">Verifying the Build</a></li>
</ul></li>
<li><a href="#build-performance">Build Performance</a><ul>
@@ -97,10 +96,12 @@
<li><a href="#getting-help">Getting Help</a></li>
</ul></li>
<li><a href="#hints-and-suggestions-for-advanced-users">Hints and Suggestions for Advanced Users</a><ul>
<li><a href="#setting-up-a-repository-for-pushing-changes-defpath">Setting Up a Repository for Pushing Changes (defpath)</a></li>
<li><a href="#bash-completion">Bash Completion</a></li>
<li><a href="#using-multiple-configurations">Using Multiple Configurations</a></li>
<li><a href="#handling-reconfigurations">Handling Reconfigurations</a></li>
<li><a href="#using-fine-grained-make-targets">Using Fine-Grained Make Targets</a></li>
<li><a href="#learn-about-mercurial">Learn About Mercurial</a></li>
</ul></li>
<li><a href="#understanding-the-build-system">Understanding the Build System</a><ul>
<li><a href="#configurations">Configurations</a></li>
@@ -114,10 +115,10 @@
</ul>
</nav>
<h2 id="tldr-instructions-for-the-impatient">TL;DR (Instructions for the Impatient)</h2>
<p>If you are eager to try out building the JDK, these simple steps works most of the time. They assume that you have installed Git (and Cygwin if running on Windows) and cloned the top-level JDK repository that you want to build.</p>
<p>If you are eager to try out building the JDK, these simple steps works most of the time. They assume that you have installed Mercurial (and Cygwin if running on Windows) and cloned the top-level JDK repository that you want to build.</p>
<ol type="1">
<li><p><a href="#getting-the-source-code">Get the complete source code</a>:<br />
<code>git clone https://git.openjdk.java.net/jdk/</code></p></li>
<code>hg clone http://hg.openjdk.java.net/jdk/jdk</code></p></li>
<li><p><a href="#running-configure">Run configure</a>:<br />
<code>bash configure</code></p>
<p>If <code>configure</code> fails due to missing dependencies (to either the <a href="#native-compiler-toolchain-requirements">toolchain</a>, <a href="#build-tools-requirements">build tools</a>, <a href="#external-library-requirements">external libraries</a> or the <a href="#boot-jdk-requirements">boot JDK</a>), most of the time it prints a suggestion on how to resolve the situation on your platform. Follow the instructions, and try running <code>bash configure</code> again.</p></li>
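Put together, a minimal end-to-end session following the steps above might look like the sketch below; the `make images` target and the build directory layout are taken from later sections of this document, and the final version check is a common smoke test rather than a required step:

```
# 1. Get the complete source code (Mercurial URL from the text above; the
#    Git-based instructions use https://git.openjdk.java.net/jdk/ instead)
hg clone http://hg.openjdk.java.net/jdk/jdk
cd jdk

# 2. Create a build configuration
bash configure

# 3. Build a JDK image
make images

# 4. Smoke-test the result (the build directory name varies by platform
#    and configuration)
./build/*/images/jdk/bin/java -version
```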
@@ -133,8 +134,8 @@
<p>The JDK is a complex software project. Building it requires a certain amount of technical expertise, a fair number of dependencies on external software, and reasonably powerful hardware.</p>
<p>If you just want to use the JDK and not build it yourself, this document is not for you. See for instance <a href="http://openjdk.java.net/install">OpenJDK installation</a> for some methods of installing a prebuilt JDK.</p>
<h2 id="getting-the-source-code">Getting the Source Code</h2>
<p>Make sure you are getting the correct version. As of JDK 10, the source is no longer split into separate repositories so you only need to clone one single repository. At the <a href="https://git.openjdk.java.net/">OpenJDK Git site</a> you can see a list of all available repositories. If you want to build an older version, e.g. JDK 11, it is recommended that you get the <code>jdk11u</code> repo, which contains incremental updates, instead of the <code>jdk11</code> repo, which was frozen at JDK 11 GA.</p>
<p>If you are new to Git, a good place to start is the book <a href="https://git-scm.com/book/en/v2">Pro Git</a>. The rest of this document assumes a working knowledge of Git.</p>
<p>Make sure you are getting the correct version. As of JDK 10, the source is no longer split into separate repositories so you only need to clone one single repository. At the <a href="http://hg.openjdk.java.net/">OpenJDK Mercurial server</a> you can see a list of all available repositories. If you want to build an older version, e.g. JDK 8, it is recommended that you get the <code>jdk8u</code> forest, which contains incremental updates, instead of the <code>jdk8</code> forest, which was frozen at JDK 8 GA.</p>
<p>If you are new to Mercurial, a good place to start is the <a href="http://www.mercurial-scm.org/guide">Mercurial Beginner's Guide</a>. The rest of this document assumes a working knowledge of Mercurial.</p>
<h3 id="special-considerations">Special Considerations</h3>
<p>For a smooth building experience, it is recommended that you follow these rules on where and how to check out the source code.</p>
<ul>
@@ -145,11 +146,7 @@
<ul>
<li><p>Create the directory that is going to contain the top directory of the JDK clone by using the <code>mkdir</code> command in the Cygwin bash shell. That is, do <em>not</em> create it using Windows Explorer. This will ensure that it will have proper Cygwin attributes, and that it's children will inherit those attributes.</p></li>
<li><p>Do not put the JDK clone in a path under your Cygwin home directory. This is especially important if your user name contains spaces and/or mixed upper and lower case letters.</p></li>
<li><p>You need to install a git client. You have two choices, Cygwin git or Git for Windows. Unfortunately there are pros and cons with each choice.</p>
<ul>
<li><p>The Cygwin <code>git</code> client has no line ending issues and understands Cygwin paths (which are used throughout the JDK build system). However, it does not currently work well with the Skara CLI tooling. Please see the <a href="https://wiki.openjdk.java.net/display/SKARA/Skara#Skara-Git">Skara wiki on Git clients</a> for up-to-date information about the Skara git client support.</p></li>
<li><p>The <a href="https://gitforwindows.org">Git for Windows</a> client has issues with line endings, and do not understand Cygwin paths. It does work well with the Skara CLI tooling, however. To alleviate the line ending problems, make sure you set <code>core.autocrlf</code> to <code>false</code> (this is asked during installation).</p></li>
</ul></li>
<li><p>Clone the JDK repository using the Cygwin command line <code>hg</code> client as instructed in this document. That is, do <em>not</em> use another Mercurial client such as TortoiseHg.</p></li>
</ul>
<p>Failure to follow this procedure might result in hard-to-debug build problems.</p></li>
</ul>
@@ -196,7 +193,7 @@
<p>Windows XP is not a supported platform, but all newer Windows should be able to build the JDK.</p>
<p>On Windows, it is important that you pay attention to the instructions in the <a href="#special-considerations">Special Considerations</a>.</p>
<p>Windows is the only non-POSIX OS supported by the JDK, and as such, requires some extra care. A POSIX support layer is required to build on Windows. Currently, the only supported such layers are Cygwin and Windows Subsystem for Linux (WSL). (Msys is no longer supported due to a too old bash; msys2 would likely be possible to support in a future version but that would require effort to implement.)</p>
<p>Internally in the build system, all paths are represented as Unix-style paths, e.g. <code>/cygdrive/c/git/jdk/Makefile</code> rather than <code>C:\git\jdk\Makefile</code>. This rule also applies to input to the build system, e.g. in arguments to <code>configure</code>. So, use <code>--with-msvcr-dll=/cygdrive/c/msvcr100.dll</code> rather than <code>--with-msvcr-dll=c:\msvcr100.dll</code>. For details on this conversion, see the section on <a href="#fixpath">Fixpath</a>.</p>
<p>Internally in the build system, all paths are represented as Unix-style paths, e.g. <code>/cygdrive/c/hg/jdk9/Makefile</code> rather than <code>C:\hg\jdk9\Makefile</code>. This rule also applies to input to the build system, e.g. in arguments to <code>configure</code>. So, use <code>--with-msvcr-dll=/cygdrive/c/msvcr100.dll</code> rather than <code>--with-msvcr-dll=c:\msvcr100.dll</code>. For details on this conversion, see the section on <a href="#fixpath">Fixpath</a>.</p>
<h4 id="cygwin">Cygwin</h4>
<p>A functioning <a href="http://www.cygwin.com/">Cygwin</a> environment is required for building the JDK on Windows. If you have a 64-bit OS, we strongly recommend using the 64-bit version of Cygwin.</p>
<p><strong>Note:</strong> Cygwin has a model of continuously updating all packages without any easy way to install or revert to a specific version of a package. This means that whenever you add or update a package in Cygwin, you might (inadvertently) update tools that are used by the JDK build process, and that can cause unexpected build problems.</p>
@@ -227,8 +224,6 @@
<pre><code>sudo apt-get install build-essential</code></pre>
<p>For rpm-based distributions (Fedora, Red Hat, etc), try this:</p>
<pre><code>sudo yum groupinstall &quot;Development Tools&quot;</code></pre>
<p>For Alpine Linux, aside from basic tooling, install the GNU versions of some programs:</p>
<pre><code>sudo apk add build-base bash grep zip</code></pre>
<h3 id="aix">AIX</h3>
<p>Please consult the AIX section of the <a href="https://wiki.openjdk.java.net/display/Build/Supported+Build+Platforms">Supported Build Platforms</a> OpenJDK Build Wiki page for details about which versions of AIX are supported.</p>
<h2 id="native-compiler-toolchain-requirements">Native Compiler (Toolchain) Requirements</h2>
@@ -270,7 +265,7 @@
<tbody>
<tr class="odd">
<td style="text-align: left;">Linux</td>
<td style="text-align: left;">gcc 10.2.0</td>
<td style="text-align: left;">gcc 9.2.0</td>
</tr>
<tr class="even">
<td style="text-align: left;">macOS</td>
@@ -278,17 +273,17 @@
</tr>
<tr class="odd">
<td style="text-align: left;">Windows</td>
<td style="text-align: left;">Microsoft Visual Studio 2019 update 16.7.2</td>
<td style="text-align: left;">Microsoft Visual Studio 2019 update 16.5.3</td>
</tr>
</tbody>
</table>
<p>All compilers are expected to be able to compile to the C99 language standard, as some C99 features are used in the source code. Microsoft Visual Studio doesn't fully support C99 so in practice shared code is limited to using C99 features that it does support.</p>
<h3 id="gcc">gcc</h3>
<p>The minimum accepted version of gcc is 5.0. Older versions will generate a warning by <code>configure</code> and are unlikely to work.</p>
<p>The JDK is currently known to be able to compile with at least version 10.2 of gcc.</p>
<p>The JDK is currently known to be able to compile with at least version 9.2 of gcc.</p>
<p>In general, any version between these two should be usable.</p>
<h3 id="clang">clang</h3>
<p>The minimum accepted version of clang is 3.5. Older versions will not be accepted by <code>configure</code>.</p>
<p>The minimum accepted version of clang is 3.2. Older versions will not be accepted by <code>configure</code>.</p>
<p>To use clang instead of gcc on Linux, use <code>--with-toolchain-type=clang</code>.</p>
<h3 id="apple-xcode">Apple Xcode</h3>
<p>The oldest supported version of Xcode is 8.</p>
@@ -297,9 +292,9 @@
<p>It is advisable to keep an older version of Xcode for building the JDK when updating Xcode. This <a href="http://iosdevelopertips.com/xcode/install-multiple-versions-of-xcode.html">blog page</a> has good suggestions on managing multiple Xcode versions. To use a specific version of Xcode, use <code>xcode-select -s</code> before running <code>configure</code>, or use <code>--with-toolchain-path</code> to point to the version of Xcode to use, e.g. <code>configure --with-toolchain-path=/Applications/Xcode8.app/Contents/Developer/usr/bin</code></p>
<p>If you have recently (inadvertently) updated your OS and/or Xcode version, and the JDK can no longer be built, please see the section on <a href="#problems-with-the-build-environment">Problems with the Build Environment</a>, and <a href="#getting-help">Getting Help</a> to find out if there are any recent, non-merged patches available for this update.</p>
<h3 id="microsoft-visual-studio">Microsoft Visual Studio</h3>
<p>The minimum accepted version of Visual Studio is 2017. Older versions will not be accepted by <code>configure</code> and will not work. The maximum accepted version of Visual Studio is 2019.</p>
<p>If you have multiple versions of Visual Studio installed, <code>configure</code> will by default pick the latest. You can request a specific version to be used by setting <code>--with-toolchain-version</code>, e.g. <code>--with-toolchain-version=2017</code>.</p>
<p>If you have Visual Studio installed but <code>configure</code> fails to detect it, it may be because of <a href="#spaces-in-path">spaces in path</a>.</p>
<p>The minimum accepted version of Visual Studio is 2010. Older versions will not be accepted by <code>configure</code>. The maximum accepted version of Visual Studio is 2019. Versions older than 2017 are unlikely to continue working for long.</p>
<p>If you have multiple versions of Visual Studio installed, <code>configure</code> will by default pick the latest. You can request a specific version to be used by setting <code>--with-toolchain-version</code>, e.g. <code>--with-toolchain-version=2015</code>.</p>
<p>If you get <code>LINK: fatal error LNK1123: failure during conversion to COFF: file invalid</code> when building using Visual Studio 2010, you have encountered <a href="http://support.microsoft.com/kb/2757355">KB2757355</a>, a bug triggered by a specific installation order. However, the solution suggested by the KB article does not always resolve the problem. See <a href="https://stackoverflow.com/questions/10888391">this stackoverflow discussion</a> for other suggestions.</p>
<h3 id="ibm-xl-cc">IBM XL C/C++</h3>
<p>Please consult the AIX section of the <a href="https://wiki.openjdk.java.net/display/Build/Supported+Build+Platforms">Supported Build Platforms</a> OpenJDK Build Wiki page for details about which versions of XLC are supported.</p>
<h2 id="boot-jdk-requirements">Boot JDK Requirements</h2>
@@ -319,8 +314,6 @@
<ul>
<li>To install on an apt-based Linux, try running <code>sudo apt-get install libfreetype6-dev</code>.</li>
<li>To install on an rpm-based Linux, try running <code>sudo yum install freetype-devel</code>.</li>
<li>To install on Alpine Linux, try running <code>sudo apk add freetype-dev</code>.</li>
<li>To install on macOS, try running <code>brew install freetype</code>.</li>
</ul>
<p>Use <code>--with-freetype-include=&lt;path&gt;</code> and <code>--with-freetype-lib=&lt;path&gt;</code> if <code>configure</code> does not automatically locate the platform FreeType files.</p>
<h3 id="cups">CUPS</h3>
@@ -328,7 +321,6 @@
<ul>
<li>To install on an apt-based Linux, try running <code>sudo apt-get install libcups2-dev</code>.</li>
<li>To install on an rpm-based Linux, try running <code>sudo yum install cups-devel</code>.</li>
<li>To install on Alpine Linux, try running <code>sudo apk add cups-dev</code>.</li>
</ul>
<p>Use <code>--with-cups=&lt;path&gt;</code> if <code>configure</code> does not properly locate your CUPS files.</p>
<h3 id="x11">X11</h3>
@@ -336,7 +328,6 @@
<ul>
<li>To install on an apt-based Linux, try running <code>sudo apt-get install libx11-dev libxext-dev libxrender-dev libxrandr-dev libxtst-dev libxt-dev</code>.</li>
<li>To install on an rpm-based Linux, try running <code>sudo yum install libXtst-devel libXt-devel libXrender-devel libXrandr-devel libXi-devel</code>.</li>
<li>To install on Alpine Linux, try running <code>sudo apk add libx11-dev libxext-dev libxrender-dev libxrandr-dev libxtst-dev libxt-dev</code>.</li>
</ul>
<p>Use <code>--with-x=&lt;path&gt;</code> if <code>configure</code> does not properly locate your X11 files.</p>
<h3 id="alsa">ALSA</h3>
@@ -344,7 +335,6 @@
<ul>
<li>To install on an apt-based Linux, try running <code>sudo apt-get install libasound2-dev</code>.</li>
<li>To install on an rpm-based Linux, try running <code>sudo yum install alsa-lib-devel</code>.</li>
<li>To install on Alpine Linux, try running <code>sudo apk add alsa-lib-dev</code>.</li>
</ul>
<p>Use <code>--with-alsa=&lt;path&gt;</code> if <code>configure</code> does not properly locate your ALSA files.</p>
<h3 id="libffi">libffi</h3>
@@ -352,7 +342,6 @@
<ul>
<li>To install on an apt-based Linux, try running <code>sudo apt-get install libffi-dev</code>.</li>
<li>To install on an rpm-based Linux, try running <code>sudo yum install libffi-devel</code>.</li>
<li>To install on Alpine Linux, try running <code>sudo apk add libffi-dev</code>.</li>
</ul>
<p>Use <code>--with-libffi=&lt;path&gt;</code> if <code>configure</code> does not properly locate your libffi files.</p>
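When several of these libraries live in non-standard locations, the individual `--with-*` switches from the sections above can be combined into a single `configure` call; the paths below are placeholders for wherever the headers and libraries actually reside:

```
bash configure \
    --with-freetype-include=/opt/deps/include/freetype2 \
    --with-freetype-lib=/opt/deps/lib \
    --with-cups=/opt/deps \
    --with-x=/opt/deps \
    --with-alsa=/opt/deps \
    --with-libffi=/opt/deps
```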
<h2 id="build-tools-requirements">Build Tools Requirements</h2>
@@ -361,7 +350,6 @@
<ul>
<li>To install on an apt-based Linux, try running <code>sudo apt-get install autoconf</code>.</li>
<li>To install on an rpm-based Linux, try running <code>sudo yum install autoconf</code>.</li>
<li>To install on Alpine Linux, try running <code>sudo apk add autoconf</code>.</li>
<li>To install on macOS, try running <code>brew install autoconf</code>.</li>
<li>To install on Windows, try running <code>&lt;path to Cygwin setup&gt;/setup-x86_64 -q -P autoconf</code>.</li>
</ul>
@@ -404,7 +392,7 @@
<li><code>--enable-jvm-feature-&lt;feature&gt;</code> or <code>--disable-jvm-feature-&lt;feature&gt;</code> - Include (or exclude) <code>&lt;feature&gt;</code> as a JVM feature in Hotspot. You can also specify a list of features to be enabled, separated by space or comma, as <code>--with-jvm-features=&lt;feature&gt;[,&lt;feature&gt;...]</code>. If you prefix <code>&lt;feature&gt;</code> with a <code>-</code>, it will be disabled. These options will modify the default list of features for the JVM variant(s) you are building. For the <code>custom</code> JVM variant, the default list is empty. A complete list of valid JVM features can be found using <code>bash configure --help</code>.</li>
<li><code>--with-target-bits=&lt;bits&gt;</code> - Create a target binary suitable for running on a <code>&lt;bits&gt;</code> platform. Use this to create 32-bit output on a 64-bit build platform, instead of doing a full cross-compile. (This is known as a <em>reduced</em> build.)</li>
</ul>
<p>On Linux, BSD and AIX, it is possible to override where Java by default searches for runtime/JNI libraries. This can be useful in situations where there is a special shared directory for system JNI libraries. This setting can in turn be overridden at runtime by setting the <code>java.library.path</code> property.</p>
<p>On Linux, BSD and AIX, it is possible to override where Java by default searches for runtime/JNI libraries. This can be useful in situations where there is a special shared directory for system JNI libraries. This setting can in turn be overriden at runtime by setting the <code>java.library.path</code> property.</p>
<ul>
<li><code>--with-jni-libpath=&lt;path&gt;</code> - Use the specified path as a default when searching for runtime libraries.</li>
</ul>
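As a sketch, a `configure` line combining the options discussed above; the disabled feature name and the JNI library path are illustrative placeholders, not recommendations:

```
bash configure \
    --with-target-bits=32 \
    --with-jvm-features=-cds \
    --with-jni-libpath=/usr/lib/jni
```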
@@ -443,7 +431,7 @@
<h3 id="configure-control-variables">Configure Control Variables</h3>
<p>It is possible to control certain aspects of <code>configure</code> by overriding the value of <code>configure</code> variables, either on the command line or in the environment.</p>
<p>Normally, this is <strong>not recommended</strong>. If used improperly, it can lead to a broken configuration. Unless you're well versed in the build system, this is hard to use properly. Therefore, <code>configure</code> will print a warning if this is detected.</p>
<p>However, there are a few <code>configure</code> variables, known as <em>control variables</em> that are supposed to be overridden on the command line. These are variables that describe the location of tools needed by the build, like <code>MAKE</code> or <code>GREP</code>. If any such variable is specified, <code>configure</code> will use that value instead of trying to autodetect the tool. For instance, <code>bash configure MAKE=/opt/gnumake4.0/bin/make</code>.</p>
<p>However, there are a few <code>configure</code> variables, known as <em>control variables</em> that are supposed to be overriden on the command line. These are variables that describe the location of tools needed by the build, like <code>MAKE</code> or <code>GREP</code>. If any such variable is specified, <code>configure</code> will use that value instead of trying to autodetect the tool. For instance, <code>bash configure MAKE=/opt/gnumake4.0/bin/make</code>.</p>
<p>If a configure argument exists, use that instead, e.g. use <code>--with-jtreg</code> instead of setting <code>JTREGEXE</code>.</p>
<p>Also note that, despite what autoconf claims, setting <code>CFLAGS</code> will not accomplish anything. Instead use <code>--with-extra-cflags</code> (and similar for <code>cxxflags</code> and <code>ldflags</code>).</p>
<h2 id="running-make">Running Make</h2>
@@ -480,7 +468,7 @@
<h3 id="make-control-variables">Make Control Variables</h3>
<p>It is possible to control <code>make</code> behavior by overriding the value of <code>make</code> variables, either on the command line or in the environment.</p>
<p>Normally, this is <strong>not recommended</strong>. If used improperly, it can lead to a broken build. Unless you're well versed in the build system, this is hard to use properly. Therefore, <code>make</code> will print a warning if this is detected.</p>
<p>However, there are a few <code>make</code> variables, known as <em>control variables</em> that are supposed to be overridden on the command line. These make up the &quot;make time&quot; configuration, as opposed to the &quot;configure time&quot; configuration.</p>
<p>However, there are a few <code>make</code> variables, known as <em>control variables</em> that are supposed to be overriden on the command line. These make up the &quot;make time&quot; configuration, as opposed to the &quot;configure time&quot; configuration.</p>
<h4 id="general-make-control-variables">General Make Control Variables</h4>
<ul>
<li><code>JOBS</code> - Specify the number of jobs to build with. See <a href="#build-performance">Build Performance</a>.</li>
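For example, the `JOBS` control variable described above can be passed directly on the `make` command line; the job count and target here are arbitrary:

```
make images JOBS=8
```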
@@ -629,154 +617,78 @@ cp: cannot stat `arm-linux-gnueabihf/libSM.so&#39;: No such file or directory
cp: cannot stat `arm-linux-gnueabihf/libXt.so&#39;: No such file or directory</code></pre></li>
<li><p>If the X11 libraries are not properly detected by <code>configure</code>, you can point them out by <code>--with-x</code>.</p></li>
</ul>
<h3 id="cross-compiling-with-debian-sysroots">Cross compiling with Debian sysroots</h3>
<h3 id="creating-and-using-sysroots-with-qemu-deboostrap">Creating And Using Sysroots With qemu-deboostrap</h3>
<p>Fortunately, you can create sysroots for foreign architectures with tools provided by your OS. On Debian/Ubuntu systems, one could use <code>qemu-deboostrap</code> to create the <em>target</em> system chroot, which would have the native libraries and headers specific to that <em>target</em> system. After that, we can use the cross-compiler on the <em>build</em> system, pointing into chroot to get the build dependencies right. This allows building for foreign architectures with native compilation speed.</p>
<p>For example, cross-compiling to AArch64 from x86_64 could be done like this:</p>
<ul>
<li><p>Install cross-compiler on the <em>build</em> system:</p>
<pre><code>apt install g++-aarch64-linux-gnu gcc-aarch64-linux-gnu</code></pre></li>
<li><p>Create chroot on the <em>build</em> system, configuring it for <em>target</em> system:</p>
<pre><code>sudo qemu-debootstrap \
--arch=arm64 \
--verbose \
--include=fakeroot,symlinks,build-essential,libx11-dev,libxext-dev,libxrender-dev,libxrandr-dev,libxtst-dev,libxt-dev,libcups2-dev,libfontconfig1-dev,libasound2-dev,libfreetype6-dev,libpng-dev,libffi-dev \
--resolve-deps \
buster \
~/sysroot-arm64 \
http://httpredir.debian.org/debian/</code></pre></li>
<li><p>Make sure the symlinks inside the newly created chroot point to proper locations:</p>
<pre><code>sudo chroot ~/sysroot-arm64 symlinks -cr .</code></pre></li>
<li><p>Configure and build with newly created chroot as sysroot/toolchain-path:</p>
<pre><code>sh ./configure \
--openjdk-target=aarch64-linux-gnu \
--with-sysroot=~/sysroot-arm64
make images
ls build/linux-aarch64-server-release/</code></pre></li>
<li>Install cross-compiler on the <em>build</em> system:</li>
</ul>
<p>The build does not create new files in that chroot, so it can be reused for multiple builds without additional cleanup.</p>
<p>The build system should automatically detect the toolchain paths and dependencies, but sometimes it might require a little nudge with:</p>
<pre><code>apt install g++-aarch64-linux-gnu gcc-aarch64-linux-gnu</code></pre>
<ul>
<li><p>Native compilers: override <code>CC</code> or <code>CXX</code> for <code>./configure</code></p></li>
<li><p>Freetype lib location: override <code>--with-freetype-lib</code>, for example <code>${sysroot}/usr/lib/${target}/</code></p></li>
<li><p>Freetype includes location: override <code>--with-freetype-include</code> for example <code>${sysroot}/usr/include/freetype2/</code></p></li>
<li><p>X11 libraries location: override <code>--x-libraries</code>, for example <code>${sysroot}/usr/lib/${target}/</code></p></li>
<li>Create chroot on the <em>build</em> system, configuring it for <em>target</em> system:</li>
</ul>
<pre><code>sudo qemu-debootstrap --arch=arm64 --verbose \
--include=fakeroot,build-essential,libx11-dev,libxext-dev,libxrender-dev,libxrandr-dev,libxtst-dev,libxt-dev,libcups2-dev,libfontconfig1-dev,libasound2-dev,libfreetype6-dev,libpng12-dev \
--resolve-deps jessie /chroots/arm64 http://httpredir.debian.org/debian/</code></pre>
<ul>
<li>Configure and build with newly created chroot as sysroot/toolchain-path:</li>
</ul>
<pre><code>CC=aarch64-linux-gnu-gcc CXX=aarch64-linux-gnu-g++ sh ./configure --openjdk-target=aarch64-linux-gnu --with-sysroot=/chroots/arm64/ --with-toolchain-path=/chroots/arm64/
make images
ls build/linux-aarch64-normal-server-release/</code></pre>
<p>The build does not create new files in that chroot, so it can be reused for multiple builds without additional cleanup.</p>
<p>Architectures that are known to successfully cross-compile like this are:</p>
<table>
<thead>
<tr class="header">
<th style="text-align: left;">Target</th>
<th style="text-align: left;">Debian tree</th>
<th style="text-align: left;">Debian arch</th>
<th style="text-align: left;"><code>CC</code></th>
<th style="text-align: left;"><code>CXX</code></th>
<th style="text-align: left;"><code>--arch=...</code></th>
<th style="text-align: left;"><code>--openjdk-target=...</code></th>
<th><code>--with-jvm-variants=...</code></th>
</tr>
</thead>
<tbody>
<tr class="odd">
<td style="text-align: left;">x86</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">default</td>
<td style="text-align: left;">default</td>
<td style="text-align: left;">i386</td>
<td style="text-align: left;">i386-linux-gnu</td>
<td>(all)</td>
</tr>
<tr class="even">
<td style="text-align: left;">arm</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">armhf</td>
<td style="text-align: left;">gcc-arm-linux-gnueabihf</td>
<td style="text-align: left;">g++-arm-linux-gnueabihf</td>
<td style="text-align: left;">armhf</td>
<td style="text-align: left;">arm-linux-gnueabihf</td>
<td>(all)</td>
</tr>
<tr class="odd">
<td style="text-align: left;">aarch64</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">gcc-aarch64-linux-gnu</td>
<td style="text-align: left;">g++-aarch64-linux-gnu</td>
<td style="text-align: left;">arm64</td>
<td style="text-align: left;">aarch64-linux-gnu</td>
<td>(all)</td>
</tr>
<tr class="even">
<td style="text-align: left;">ppc64le</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">ppc64el</td>
<td style="text-align: left;">gcc-powerpc64le-linux-gnu</td>
<td style="text-align: left;">g++-powerpc64le-linux-gnu</td>
<td style="text-align: left;">ppc64el</td>
<td style="text-align: left;">powerpc64le-linux-gnu</td>
<td>(all)</td>
</tr>
<tr class="odd">
<td style="text-align: left;">s390x</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">gcc-s390x-linux-gnu</td>
<td style="text-align: left;">g++-s390x-linux-gnu</td>
<td style="text-align: left;">s390x</td>
<td style="text-align: left;">s390x-linux-gnu</td>
<td>(all)</td>
</tr>
<tr class="even">
<td style="text-align: left;">mipsle</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">mipsel</td>
<td style="text-align: left;">mipsel-linux-gnu</td>
<td>zero</td>
</tr>
<tr class="odd">
<td style="text-align: left;">mips64le</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">mips64el</td>
<td style="text-align: left;">mips64el-linux-gnueabi64</td>
<td>zero</td>
</tr>
<tr class="even">
<td style="text-align: left;">armel</td>
<td style="text-align: left;">buster</td>
<td style="text-align: left;">arm</td>
<td style="text-align: left;">arm-linux-gnueabi</td>
<td>zero</td>
</tr>
<tr class="odd">
<td style="text-align: left;">ppc</td>
<td style="text-align: left;">sid</td>
<td style="text-align: left;">powerpc</td>
<td style="text-align: left;">powerpc-linux-gnu</td>
<td>zero</td>
</tr>
<tr class="even">
<td style="text-align: left;">ppc64be</td>
<td style="text-align: left;">sid</td>
<td style="text-align: left;">ppc64</td>
<td style="text-align: left;">powerpc64-linux-gnu</td>
<td>(all)</td>
</tr>
<tr class="odd">
<td style="text-align: left;">m68k</td>
<td style="text-align: left;">sid</td>
<td style="text-align: left;">m68k</td>
<td style="text-align: left;">m68k-linux-gnu</td>
<td>zero</td>
</tr>
<tr class="even">
<td style="text-align: left;">alpha</td>
<td style="text-align: left;">sid</td>
<td style="text-align: left;">alpha</td>
<td style="text-align: left;">alpha-linux-gnu</td>
<td>zero</td>
</tr>
<tr class="odd">
<td style="text-align: left;">sh4</td>
<td style="text-align: left;">sid</td>
<td style="text-align: left;">sh4</td>
<td style="text-align: left;">sh4-linux-gnu</td>
<td>zero</td>
</tr>
</tbody>
</table>
<p>Additional architectures might be supported by Debian/Ubuntu Ports.</p>
<h3 id="building-for-armaarch64">Building for ARM/aarch64</h3>
<p>A common cross-compilation target is the ARM CPU. When building for ARM, it is useful to set the ABI profile. A number of pre-defined ABI profiles are available using <code>--with-abi-profile</code>: arm-vfp-sflt, arm-vfp-hflt, arm-sflt, armv5-vfp-sflt, armv6-vfp-hflt. Note that soft-float ABIs are no longer properly supported by the JDK.</p>
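A sketch of an ARM cross-compile configuration using one of the listed ABI profiles; the target triplet matches the cross-compilation table above, and sysroot or toolchain-path handling is omitted for brevity:

```
sh ./configure \
    --openjdk-target=arm-linux-gnueabihf \
    --with-abi-profile=arm-vfp-hflt
make images
```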
<h3 id="building-for-musl">Building for musl</h3>
<p>Just like it's possible to cross-compile for a different CPU, it's possible to cross-compile for musl libc on a glibc-based <em>build</em> system. A devkit suitable for most target CPU architectures can be obtained from <a href="https://musl.cc">musl.cc</a>. After installing the required packages in the sysroot, configure the build with <code>--openjdk-target</code>:</p>
<pre><code>sh ./configure --with-jvm-variants=server \
--with-boot-jdk=$BOOT_JDK \
--with-build-jdk=$BUILD_JDK \
--openjdk-target=x86_64-unknown-linux-musl \
--with-devkit=$DEVKIT \
--with-sysroot=$SYSROOT</code></pre>
<p>and run <code>make</code> normally.</p>
<h3 id="verifying-the-build">Verifying the Build</h3>
<p>The build will end up in a directory named like <code>build/linux-arm-normal-server-release</code>.</p>
<p>Inside this build output directory, the <code>images/jdk</code> will contain the newly built JDK, for your <em>target</em> system.</p>
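A quick way to inspect the result of the aarch64 example used earlier; the configuration name may or may not include `normal` depending on the JDK version, so the glob matches both forms shown in this document:

```
ls build/linux-aarch64-*server-release/images/jdk
```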
@@ -820,14 +732,14 @@ ls build/linux-aarch64-server-release/</code></pre></li>
=== Output from failing command(s) repeated here ===
* For target hotspot_variant-server_libjvm_objs_psMemoryPool.o:
/localhome/git/jdk-sandbox/hotspot/src/share/vm/services/psMemoryPool.cpp:1:1: error: &#39;failhere&#39; does not name a type
/localhome/hg/jdk9-sandbox/hotspot/src/share/vm/services/psMemoryPool.cpp:1:1: error: &#39;failhere&#39; does not name a type
... (rest of output omitted)
* All command lines available in /localhome/git/jdk-sandbox/build/linux-x64/make-support/failure-logs.
* All command lines available in /localhome/hg/jdk9-sandbox/build/linux-x64/make-support/failure-logs.
=== End of repeated output ===
=== Make failed targets repeated here ===
lib/CompileJvm.gmk:207: recipe for target &#39;/localhome/git/jdk-sandbox/build/linux-x64/hotspot/variant-server/libjvm/objs/psMemoryPool.o&#39; failed
lib/CompileJvm.gmk:207: recipe for target &#39;/localhome/hg/jdk9-sandbox/build/linux-x64/hotspot/variant-server/libjvm/objs/psMemoryPool.o&#39; failed
make/Main.gmk:263: recipe for target &#39;hotspot-server-libs&#39; failed
=== End of repeated output ===
@@ -854,7 +766,7 @@ Hint: If caused by a warning, try configure --disable-warnings-as-errors.</code>
<p>Here are a suggested list of things to try if you are having unexpected build problems. Each step requires more time than the one before, so try them in order. Most issues will be solved at step 1 or 2.</p>
<ol type="1">
<li><p>Make sure your repository is up-to-date</p>
<p>Run <code>git pull origin master</code> to make sure you have the latest changes.</p></li>
<p>Run <code>hg pull -u</code> to make sure you have the latest changes.</p></li>
<li><p>Clean build results</p>
<p>The simplest way to fix incremental rebuild issues is to run <code>make clean</code>. This will remove all build results, but not the configuration or any build system support artifacts. In most cases, this will solve build errors resulting from incremental build mismatches.</p></li>
<li><p>Completely clean the build directory.</p>
@@ -863,8 +775,8 @@ Hint: If caused by a warning, try configure --disable-warnings-as-errors.</code>
make dist-clean
bash configure $(cat current-configuration)
make</code></pre></li>
<li><p>Re-clone the Git repository</p>
<p>Sometimes the Git repository gets in a state that causes the product to be un-buildable. In such a case, the simplest solution is often the &quot;sledgehammer approach&quot;: delete the entire repository, and re-clone it. If you have local changes, save them first to a different location using <code>git format-patch</code>.</p></li>
<li><p>Re-clone the Mercurial repository</p>
<p>Sometimes the Mercurial repository gets in a state that causes the product to be un-buildable. In such a case, the simplest solution is often the &quot;sledgehammer approach&quot;: delete the entire repository, and re-clone it. If you have local changes, save them first to a different location using <code>hg export</code>.</p></li>
</ol>
<h3 id="specific-build-issues">Specific Build Issues</h3>
<h4 id="clock-skew">Clock Skew</h4>
@@ -879,12 +791,23 @@ Clock skew detected. Your build may be incomplete.</code></pre>
cannot create ... Permission denied
spawn failed</code></pre>
<p>This can be a sign of a Cygwin problem. See the information about solving problems in the <a href="#cygwin">Cygwin</a> section. Rebooting the computer might help temporarily.</p>
<h4 id="spaces-in-path">Spaces in Path</h4>
<p>On Windows, when configuring, <code>fixpath.sh</code> may report that some directory names have spaces. Usually, it assumes those directories have <a href="https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/fsutil-8dot3name">short paths</a>. You can run <code>fsutil file setshortname</code> in <code>cmd</code> on certain directories, such as <code>Microsoft Visual Studio</code> or <code>Windows Kits</code>, to assign arbitrary short paths so <code>configure</code> can access them.</p>
<h3 id="getting-help">Getting Help</h3>
<p>If none of the suggestions in this document helps you, or if you find what you believe is a bug in the build system, please contact the Build Group by sending a mail to <a href="mailto:build-dev@openjdk.java.net">build-dev@openjdk.java.net</a>. Please include the relevant parts of the configure and/or build log.</p>
<p>If you need general help or advice about developing for the JDK, you can also contact the Adoption Group. See the section on <a href="#contributing-to-openjdk">Contributing to OpenJDK</a> for more information.</p>
<h2 id="hints-and-suggestions-for-advanced-users">Hints and Suggestions for Advanced Users</h2>
<h3 id="setting-up-a-repository-for-pushing-changes-defpath">Setting Up a Repository for Pushing Changes (defpath)</h3>
<p>To help you prepare a proper push path for a Mercurial repository, there exists a useful tool known as <a href="http://openjdk.java.net/projects/code-tools/defpath">defpath</a>. It will help you setup a proper push path for pushing changes to the JDK.</p>
<p>Install the extension by cloning <code>http://hg.openjdk.java.net/code-tools/defpath</code> and updating your <code>.hgrc</code> file. Here's one way to do this:</p>
<pre><code>cd ~
mkdir hg-ext
cd hg-ext
hg clone http://hg.openjdk.java.net/code-tools/defpath
cat &lt;&lt; EOT &gt;&gt; ~/.hgrc
[extensions]
defpath=~/hg-ext/defpath/defpath.py
EOT</code></pre>
<p>You can now setup a proper push path using:</p>
<pre><code>hg defpath -d -u &lt;your OpenJDK username&gt;</code></pre>
<h3 id="bash-completion">Bash Completion</h3>
<p>The <code>configure</code> and <code>make</code> commands tries to play nice with bash command-line completion (using <code>&lt;tab&gt;</code> or <code>&lt;tab&gt;&lt;tab&gt;</code>). To use this functionality, make sure you enable completion in your <code>~/.bashrc</code> (see instructions for bash in your operating system).</p>
<p>Make completion will work out of the box, and will complete valid make targets. For instance, typing <code>make jdk-i&lt;tab&gt;</code> will complete to <code>make jdk-image</code>.</p>
@@ -908,7 +831,7 @@ sudo mv /tmp/configure /usr/local/bin</code></pre>
<p>If you update the repository and part of the configure script has changed, the build system will force you to re-run <code>configure</code>.</p>
<p>Most of the time, you will be fine by running <code>configure</code> again with the same arguments as the last time, which can easily be performed by <code>make reconfigure</code>. To simplify this, you can use the <code>CONF_CHECK</code> make control variable, either as <code>make CONF_CHECK=auto</code>, or by setting an environment variable. For instance, if you add <code>export CONF_CHECK=auto</code> to your <code>.bashrc</code> file, <code>make</code> will always run <code>reconfigure</code> automatically whenever the configure script has changed.</p>
<p>You can also use <code>CONF_CHECK=ignore</code> to skip the check for a needed configure update. This might speed up the build, but comes at the risk of an incorrect build result. This is only recommended if you know what you're doing.</p>
<p>From time to time, you will also need to modify the command line to <code>configure</code> due to changes. Use <code>make print-configuration</code> to show the command line used for your current configuration.</p>
<p>From time to time, you will also need to modify the command line to <code>configure</code> due to changes. Use <code>make print-configure</code> to show the command line used for your current configuration.</p>
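A sketch of the reconfiguration workflow described above, assuming a bash shell:

```
# Let make re-run configure automatically whenever the configure script changes
export CONF_CHECK=auto

# Re-run configure manually with the same arguments as last time
make reconfigure

# Show the configure command line behind the current configuration
make print-configure
```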
<h3 id="using-fine-grained-make-targets">Using Fine-Grained Make Targets</h3>
<p>The default behavior for make is to create consistent and correct output, at the expense of build speed, if necessary.</p>
<p>If you are prepared to take some risk of an incorrect build, and know enough of the system to understand how things build and interact, you can speed up the build process considerably by instructing make to only build a portion of the product.</p>
@@ -937,6 +860,14 @@ sudo mv /tmp/configure /usr/local/bin</code></pre>
<h4 id="rebuilding-part-of-java.base-jdk_filter">Rebuilding Part of java.base (JDK_FILTER)</h4>
<p>If you are modifying files in <code>java.base</code>, which is the by far largest module in the JDK, then you need to rebuild all those files whenever a single file has changed. (This inefficiency will hopefully be addressed in JDK 10.)</p>
<p>As a hack, you can use the make control variable <code>JDK_FILTER</code> to specify a pattern that will be used to limit the set of files being recompiled. For instance, <code>make java.base JDK_FILTER=javax/crypto</code> (or, to combine methods, <code>make java.base-java-only JDK_FILTER=javax/crypto</code>) will limit the compilation to files in the <code>javax.crypto</code> package.</p>
<h3 id="learn-about-mercurial">Learn About Mercurial</h3>
<p>To become an efficient JDK developer, it is recommended that you invest in learning Mercurial properly. Here are some links that can get you started:</p>
<ul>
<li><a href="http://www.mercurial-scm.org/wiki/GitConcepts">Mercurial for git users</a></li>
<li><a href="http://www.mercurial-scm.org/wiki/Tutorial">The official Mercurial tutorial</a></li>
<li><a href="http://hginit.com/">hg init</a></li>
<li><a href="http://hgbook.red-bean.com/read/">Mercurial: The Definitive Guide</a></li>
</ul>
<h2 id="understanding-the-build-system">Understanding the Build System</h2>
<p>This section will give you a more technical description on the details of the build system.</p>
<h3 id="configurations">Configurations</h3>


@@ -3,11 +3,11 @@
## TL;DR (Instructions for the Impatient)
If you are eager to try out building the JDK, these simple steps works most of
the time. They assume that you have installed Git (and Cygwin if running
the time. They assume that you have installed Mercurial (and Cygwin if running
on Windows) and cloned the top-level JDK repository that you want to build.
1. [Get the complete source code](#getting-the-source-code): \
`git clone https://git.openjdk.java.net/jdk/`
`hg clone http://hg.openjdk.java.net/jdk/jdk`
2. [Run configure](#running-configure): \
`bash configure`
@@ -47,14 +47,14 @@ JDK.
Make sure you are getting the correct version. As of JDK 10, the source is no
longer split into separate repositories so you only need to clone one single
repository. At the [OpenJDK Git site](https://git.openjdk.java.net/) you
repository. At the [OpenJDK Mercurial server](http://hg.openjdk.java.net/) you
can see a list of all available repositories. If you want to build an older version,
e.g. JDK 11, it is recommended that you get the `jdk11u` repo, which contains
incremental updates, instead of the `jdk11` repo, which was frozen at JDK 11 GA.
e.g. JDK 8, it is recommended that you get the `jdk8u` forest, which contains
incremental updates, instead of the `jdk8` forest, which was frozen at JDK 8 GA.
If you are new to Git, a good place to start is the book [Pro
Git](https://git-scm.com/book/en/v2). The rest of this document
assumes a working knowledge of Git.
If you are new to Mercurial, a good place to start is the [Mercurial Beginner's
Guide](http://www.mercurial-scm.org/guide). The rest of this document assumes a
working knowledge of Mercurial.
### Special Considerations
@@ -89,21 +89,9 @@ on where and how to check out the source code.
directory. This is especially important if your user name contains
spaces and/or mixed upper and lower case letters.
* You need to install a git client. You have two choices, Cygwin git or
Git for Windows. Unfortunately there are pros and cons with each choice.
* The Cygwin `git` client has no line ending issues and understands
Cygwin paths (which are used throughout the JDK build system).
However, it does not currently work well with the Skara CLI tooling.
Please see the [Skara wiki on Git clients](
https://wiki.openjdk.java.net/display/SKARA/Skara#Skara-Git) for
up-to-date information about the Skara git client support.
* The [Git for Windows](https://gitforwindows.org) client has issues
with line endings, and do not understand Cygwin paths. It does work
well with the Skara CLI tooling, however. To alleviate the line ending
problems, make sure you set `core.autocrlf` to `false` (this is asked
during installation).
* Clone the JDK repository using the Cygwin command line `hg` client
as instructed in this document. That is, do *not* use another Mercurial
client such as TortoiseHg.
Failure to follow this procedure might result in hard-to-debug build
problems.
@@ -185,7 +173,7 @@ likely be possible to support in a future version but that would require effort
to implement.)
Internally in the build system, all paths are represented as Unix-style paths,
e.g. `/cygdrive/c/git/jdk/Makefile` rather than `C:\git\jdk\Makefile`. This
e.g. `/cygdrive/c/hg/jdk9/Makefile` rather than `C:\hg\jdk9\Makefile`. This
rule also applies to input to the build system, e.g. in arguments to
`configure`. So, use `--with-msvcr-dll=/cygdrive/c/msvcr100.dll` rather than
`--with-msvcr-dll=c:\msvcr100.dll`. For details on this conversion, see the section
@@ -285,13 +273,6 @@ For rpm-based distributions (Fedora, Red Hat, etc), try this:
sudo yum groupinstall "Development Tools"
```
For Alpine Linux, aside from basic tooling, install the GNU versions of some
programs:
```
sudo apk add build-base bash grep zip
```
### AIX
Please consult the AIX section of the [Supported Build Platforms](
@@ -321,9 +302,9 @@ issues.
Operating system Toolchain version
------------------ -------------------------------------------------------
Linux gcc 10.2.0
Linux gcc 9.2.0
macOS Apple Xcode 10.1 (using clang 10.0.0)
Windows Microsoft Visual Studio 2019 update 16.7.2
Windows Microsoft Visual Studio 2019 update 16.5.3
All compilers are expected to be able to compile to the C99 language standard,
as some C99 features are used in the source code. Microsoft Visual Studio
@@ -335,14 +316,14 @@ features that it does support.
The minimum accepted version of gcc is 5.0. Older versions will generate a warning
by `configure` and are unlikely to work.
The JDK is currently known to be able to compile with at least version 10.2 of
The JDK is currently known to be able to compile with at least version 9.2 of
gcc.
In general, any version between these two should be usable.
### clang
The minimum accepted version of clang is 3.5. Older versions will not be
The minimum accepted version of clang is 3.2. Older versions will not be
accepted by `configure`.
To use clang instead of gcc on Linux, use `--with-toolchain-type=clang`.
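For example, a minimal sketch of such a `configure` invocation (any other options you would normally pass stay the same):

```
sh ./configure --with-toolchain-type=clang
```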
@@ -374,16 +355,20 @@ available for this update.
### Microsoft Visual Studio
The minimum accepted version of Visual Studio is 2017. Older versions will not
be accepted by `configure` and will not work. The maximum accepted
version of Visual Studio is 2019.
The minimum accepted version of Visual Studio is 2010. Older versions will not
be accepted by `configure`. The maximum accepted version of Visual Studio is
2019. Versions older than 2017 are unlikely to continue working for long.
If you have multiple versions of Visual Studio installed, `configure` will by
default pick the latest. You can request a specific version to be used by
setting `--with-toolchain-version`, e.g. `--with-toolchain-version=2017`.
setting `--with-toolchain-version`, e.g. `--with-toolchain-version=2015`.
If you have Visual Studio installed but `configure` fails to detect it, it may
be because of [spaces in path](#spaces-in-path).
If you get `LINK: fatal error LNK1123: failure during conversion to COFF: file
invalid` when building using Visual Studio 2010, you have encountered
[KB2757355](http://support.microsoft.com/kb/2757355), a bug triggered by a
specific installation order. However, the solution suggested by the KB article
does not always resolve the problem. See [this stackoverflow discussion](
https://stackoverflow.com/questions/10888391) for other suggestions.
### IBM XL C/C++
@@ -453,8 +438,6 @@ rather than bundling the JDK's own copy.
libfreetype6-dev`.
* To install on an rpm-based Linux, try running `sudo yum install
freetype-devel`.
* To install on Alpine Linux, try running `sudo apk add freetype-dev`.
* To install on macOS, try running `brew install freetype`.
Use `--with-freetype-include=<path>` and `--with-freetype-lib=<path>`
if `configure` does not automatically locate the platform FreeType files.
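As a hedged illustration only: on a Debian-based x86_64 system the headers and library are often found in the locations below, but the exact paths depend on your distribution and architecture:

```
sh ./configure \
    --with-freetype-include=/usr/include/freetype2 \
    --with-freetype-lib=/usr/lib/x86_64-linux-gnu
```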
@@ -469,7 +452,6 @@ your operating system.
libcups2-dev`.
* To install on an rpm-based Linux, try running `sudo yum install
cups-devel`.
* To install on Alpine Linux, try running `sudo apk add cups-dev`.
Use `--with-cups=<path>` if `configure` does not properly locate your CUPS
files.
@@ -483,8 +465,6 @@ Linux.
libx11-dev libxext-dev libxrender-dev libxrandr-dev libxtst-dev libxt-dev`.
* To install on an rpm-based Linux, try running `sudo yum install
libXtst-devel libXt-devel libXrender-devel libXrandr-devel libXi-devel`.
* To install on Alpine Linux, try running `sudo apk add libx11-dev
libxext-dev libxrender-dev libxrandr-dev libxtst-dev libxt-dev`.
Use `--with-x=<path>` if `configure` does not properly locate your X11 files.
@@ -497,7 +477,6 @@ required on Linux. At least version 0.9.1 of ALSA is required.
libasound2-dev`.
* To install on an rpm-based Linux, try running `sudo yum install
alsa-lib-devel`.
* To install on Alpine Linux, try running `sudo apk add alsa-lib-dev`.
Use `--with-alsa=<path>` if `configure` does not properly locate your ALSA
files.
@@ -512,7 +491,6 @@ Hotspot.
libffi-dev`.
* To install on an rpm-based Linux, try running `sudo yum install
libffi-devel`.
* To install on Alpine Linux, try running `sudo apk add libffi-dev`.
Use `--with-libffi=<path>` if `configure` does not properly locate your libffi
files.
@@ -528,7 +506,6 @@ platforms. At least version 2.69 is required.
autoconf`.
* To install on an rpm-based Linux, try running `sudo yum install
autoconf`.
* To install on Alpine Linux, try running `sudo apk add autoconf`.
* To install on macOS, try running `brew install autoconf`.
* To install on Windows, try running `<path to Cygwin setup>/setup-x86_64 -q
-P autoconf`.
@@ -657,7 +634,7 @@ features, use `bash configure --help=short` instead.)
On Linux, BSD and AIX, it is possible to override where Java by default
searches for runtime/JNI libraries. This can be useful in situations where
there is a special shared directory for system JNI libraries. This setting
can in turn be overridden at runtime by setting the `java.library.path` property.
can in turn be overriden at runtime by setting the `java.library.path` property.
* `--with-jni-libpath=<path>` - Use the specified path as a default
when searching for runtime libraries.
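For instance, a sketch using an illustrative directory (the actual path depends on how your system lays out its JNI libraries):

```
sh ./configure --with-jni-libpath=/usr/lib/jni
```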
@@ -723,7 +700,7 @@ hard to use properly. Therefore, `configure` will print a warning if this is
detected.
However, there are a few `configure` variables, known as *control variables*
that are supposed to be overridden on the command line. These are variables that
that are supposed to be overriden on the command line. These are variables that
describe the location of tools needed by the build, like `MAKE` or `GREP`. If
any such variable is specified, `configure` will use that value instead of
trying to autodetect the tool. For instance, `bash configure
@@ -803,7 +780,7 @@ broken build. Unless you're well versed in the build system, this is hard to
use properly. Therefore, `make` will print a warning if this is detected.
However, there are a few `make` variables, known as *control variables* that
are supposed to be overridden on the command line. These make up the "make time"
are supposed to be overriden on the command line. These make up the "make time"
configuration, as opposed to the "configure time" configuration.
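For example, a sketch of a build that overrides a couple of such control variables on the command line, assuming the `JOBS` and `LOG` control variables (the values are illustrative):

```
make images JOBS=8 LOG=info
```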
#### General Make Control Variables
@@ -1090,7 +1067,7 @@ Note that X11 is needed even if you only want to build a headless JDK.
* If the X11 libraries are not properly detected by `configure`, you can
point them out by `--with-x`.
### Cross compiling with Debian sysroots
### Creating And Using Sysroots With qemu-deboostrap
Fortunately, you can create sysroots for foreign architectures with tools
provided by your OS. On Debian/Ubuntu systems, one could use `qemu-debootstrap` to
@@ -1102,67 +1079,38 @@ for foreign architectures with native compilation speed.
For example, cross-compiling to AArch64 from x86_64 could be done like this:
* Install cross-compiler on the *build* system:
```
apt install g++-aarch64-linux-gnu gcc-aarch64-linux-gnu
```
* Create chroot on the *build* system, configuring it for the *target* system:
```
sudo qemu-debootstrap \
--arch=arm64 \
--verbose \
--include=fakeroot,symlinks,build-essential,libx11-dev,libxext-dev,libxrender-dev,libxrandr-dev,libxtst-dev,libxt-dev,libcups2-dev,libfontconfig1-dev,libasound2-dev,libfreetype6-dev,libpng-dev,libffi-dev \
--resolve-deps \
buster \
~/sysroot-arm64 \
http://httpredir.debian.org/debian/
```
* Make sure the symlinks inside the newly created chroot point to proper locations:
```
sudo chroot ~/sysroot-arm64 symlinks -cr .
```
```
sudo qemu-debootstrap --arch=arm64 --verbose \
--include=fakeroot,build-essential,libx11-dev,libxext-dev,libxrender-dev,libxrandr-dev,libxtst-dev,libxt-dev,libcups2-dev,libfontconfig1-dev,libasound2-dev,libfreetype6-dev,libpng12-dev \
--resolve-deps jessie /chroots/arm64 http://httpredir.debian.org/debian/
```
* Configure and build with newly created chroot as sysroot/toolchain-path:
```
sh ./configure \
--openjdk-target=aarch64-linux-gnu \
--with-sysroot=~/sysroot-arm64
make images
ls build/linux-aarch64-server-release/
```
```
CC=aarch64-linux-gnu-gcc CXX=aarch64-linux-gnu-g++ sh ./configure --openjdk-target=aarch64-linux-gnu --with-sysroot=/chroots/arm64/ --with-toolchain-path=/chroots/arm64/
make images
ls build/linux-aarch64-normal-server-release/
```
The build does not create new files in that chroot, so it can be reused for multiple builds
without additional cleanup.
The build system should automatically detect the toolchain paths and dependencies, but sometimes
it might require a little nudge with:
* Native compilers: override `CC` or `CXX` for `./configure`
* Freetype lib location: override `--with-freetype-lib`, for example `${sysroot}/usr/lib/${target}/`
* Freetype includes location: override `--with-freetype-include` for example `${sysroot}/usr/include/freetype2/`
* X11 libraries location: override `--x-libraries`, for example `${sysroot}/usr/lib/${target}/`
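Put together, a hypothetical `configure` line applying those nudges against the chroot created above might look like this (every path below is illustrative and should be adjusted to your sysroot layout):

```
CC=aarch64-linux-gnu-gcc CXX=aarch64-linux-gnu-g++ sh ./configure \
    --openjdk-target=aarch64-linux-gnu \
    --with-sysroot=~/sysroot-arm64 \
    --with-freetype-lib=~/sysroot-arm64/usr/lib/aarch64-linux-gnu/ \
    --with-freetype-include=~/sysroot-arm64/usr/include/freetype2/ \
    --x-libraries=~/sysroot-arm64/usr/lib/aarch64-linux-gnu/
```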
Architectures that are known to successfully cross-compile like this are:
Target       Debian tree  Debian arch  `--openjdk-target=...`   `--with-jvm-variants=...`
------------ ------------ ------------ ------------------------ --------------------------
x86          buster       i386         i386-linux-gnu           (all)
arm          buster       armhf        arm-linux-gnueabihf      (all)
aarch64      buster       arm64        aarch64-linux-gnu        (all)
ppc64le      buster       ppc64el      powerpc64le-linux-gnu    (all)
s390x        buster       s390x        s390x-linux-gnu          (all)
mipsle       buster       mipsel       mipsel-linux-gnu         zero
mips64le     buster       mips64el     mips64el-linux-gnueabi64 zero
armel        buster       arm          arm-linux-gnueabi        zero
ppc          sid          powerpc      powerpc-linux-gnu        zero
ppc64be      sid          ppc64        powerpc64-linux-gnu      (all)
m68k         sid          m68k         m68k-linux-gnu           zero
alpha        sid          alpha        alpha-linux-gnu          zero
sh4          sid          sh4          sh4-linux-gnu            zero
Target       `CC`                      `CXX`                     `--arch=...`  `--openjdk-target=...`
------------ ------------------------- ------------------------- ------------- -----------------------
x86          default                   default                   i386          i386-linux-gnu
armhf        gcc-arm-linux-gnueabihf   g++-arm-linux-gnueabihf   armhf         arm-linux-gnueabihf
aarch64      gcc-aarch64-linux-gnu     g++-aarch64-linux-gnu     arm64         aarch64-linux-gnu
ppc64el      gcc-powerpc64le-linux-gnu g++-powerpc64le-linux-gnu ppc64el       powerpc64le-linux-gnu
s390x        gcc-s390x-linux-gnu       g++-s390x-linux-gnu       s390x         s390x-linux-gnu
Additional architectures might be supported by Debian/Ubuntu Ports.
### Building for ARM/aarch64
@@ -1172,25 +1120,6 @@ available using `--with-abi-profile`: arm-vfp-sflt, arm-vfp-hflt, arm-sflt,
armv5-vfp-sflt, armv6-vfp-hflt. Note that soft-float ABIs are no longer
properly supported by the JDK.
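As a sketch, a 32-bit hard-float ARM cross-build could combine the profile with the usual cross-compilation options (the target triple and sysroot path are assumptions for the sake of the example):

```
sh ./configure \
    --openjdk-target=arm-linux-gnueabihf \
    --with-abi-profile=arm-vfp-hflt \
    --with-sysroot=~/sysroot-armhf
```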
### Building for musl
Just like it's possible to cross-compile for a different CPU, it's possible to
cross-compile for musl libc on a glibc-based *build* system.
A devkit suitable for most target CPU architectures can be obtained from
[musl.cc](https://musl.cc). After installing the required packages in the
sysroot, configure the build with `--openjdk-target`:
```
sh ./configure --with-jvm-variants=server \
--with-boot-jdk=$BOOT_JDK \
--with-build-jdk=$BUILD_JDK \
--openjdk-target=x86_64-unknown-linux-musl \
--with-devkit=$DEVKIT \
--with-sysroot=$SYSROOT
```
and run `make` normally.
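A quick, hedged sanity check after the build is to inspect the resulting `java` launcher; for a musl build its ELF interpreter should be the musl loader (for example `/lib/ld-musl-x86_64.so.1`) rather than the glibc one. The image path below depends on your configuration name and is only illustrative:

```
file build/linux-x86_64-server-release/images/jdk/bin/java
```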
### Verifying the Build
The build will end up in a directory named like
@@ -1315,14 +1244,14 @@ ERROR: Build failed for target 'hotspot' in configuration 'linux-x64' (exit code
=== Output from failing command(s) repeated here ===
* For target hotspot_variant-server_libjvm_objs_psMemoryPool.o:
/localhome/git/jdk-sandbox/hotspot/src/share/vm/services/psMemoryPool.cpp:1:1: error: 'failhere' does not name a type
/localhome/hg/jdk9-sandbox/hotspot/src/share/vm/services/psMemoryPool.cpp:1:1: error: 'failhere' does not name a type
... (rest of output omitted)
* All command lines available in /localhome/git/jdk-sandbox/build/linux-x64/make-support/failure-logs.
* All command lines available in /localhome/hg/jdk9-sandbox/build/linux-x64/make-support/failure-logs.
=== End of repeated output ===
=== Make failed targets repeated here ===
lib/CompileJvm.gmk:207: recipe for target '/localhome/git/jdk-sandbox/build/linux-x64/hotspot/variant-server/libjvm/objs/psMemoryPool.o' failed
lib/CompileJvm.gmk:207: recipe for target '/localhome/hg/jdk9-sandbox/build/linux-x64/hotspot/variant-server/libjvm/objs/psMemoryPool.o' failed
make/Main.gmk:263: recipe for target 'hotspot-server-libs' failed
=== End of repeated output ===
@@ -1420,7 +1349,7 @@ order. Most issues will be solved at step 1 or 2.
1. Make sure your repository is up-to-date
Run `git pull origin master` to make sure you have the latest changes.
Run `hg pull -u` to make sure you have the latest changes.
2. Clean build results
@@ -1445,13 +1374,13 @@ order. Most issues will be solved at step 1 or 2.
make
```
4. Re-clone the Git repository
4. Re-clone the Mercurial repository
Sometimes the Git repository gets in a state that causes the product
Sometimes the Mercurial repository gets in a state that causes the product
to be un-buildable. In such a case, the simplest solution is often the
"sledgehammer approach": delete the entire repository, and re-clone it.
If you have local changes, save them first to a different location using
`git format-patch`.
`hg export`.
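For instance, one way to save local commits as patch files before deleting the clone (the output directory is arbitrary):

```
git format-patch origin/master --output-directory ~/jdk-local-patches
```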
### Specific Build Issues
@@ -1483,15 +1412,6 @@ This can be a sign of a Cygwin problem. See the information about solving
problems in the [Cygwin](#cygwin) section. Rebooting the computer might help
temporarily.
#### Spaces in Path
On Windows, when configuring, `fixpath.sh` may report that some directory
names have spaces. Usually, it assumes those directories have
[short paths](https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/fsutil-8dot3name).
You can run `fsutil file setshortname` in `cmd` on certain directories, such as
`Microsoft Visual Studio` or `Windows Kits`, to assign arbitrary short paths so
`configure` can access them.
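For example, run from an elevated `cmd` prompt (both the directory and the chosen short name are just an illustration):

```
fsutil file setshortname "C:\Program Files (x86)\Microsoft Visual Studio" MSVS
```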
### Getting Help
If none of the suggestions in this document helps you, or if you find what you
@@ -1505,6 +1425,33 @@ contact the Adoption Group. See the section on [Contributing to OpenJDK](
## Hints and Suggestions for Advanced Users
### Setting Up a Repository for Pushing Changes (defpath)
To help you prepare a proper push path for a Mercurial repository, there exists
a useful tool known as [defpath](
http://openjdk.java.net/projects/code-tools/defpath). It will help you set up a
proper push path for pushing changes to the JDK.
Install the extension by cloning
`http://hg.openjdk.java.net/code-tools/defpath` and updating your `.hgrc` file.
Here's one way to do this:
```
cd ~
mkdir hg-ext
cd hg-ext
hg clone http://hg.openjdk.java.net/code-tools/defpath
cat << EOT >> ~/.hgrc
[extensions]
defpath=~/hg-ext/defpath/defpath.py
EOT
```
You can now set up a proper push path using:
```
hg defpath -d -u <your OpenJDK username>
```
### Bash Completion
The `configure` and `make` commands try to play nice with bash command-line
@@ -1570,8 +1517,8 @@ update. This might speed up the build, but comes at the risk of an incorrect
build result. This is only recommended if you know what you're doing.
From time to time, you will also need to modify the command line to `configure`
due to changes. Use `make print-configuration` to show the command line used
for your current configuration.
due to changes. Use `make print-configure` to show the command line used for
your current configuration.
### Using Fine-Grained Make Targets
@@ -1645,6 +1592,16 @@ instance, `make java.base JDK_FILTER=javax/crypto` (or, to combine methods,
`make java.base-java-only JDK_FILTER=javax/crypto`) will limit the compilation
to files in the `javax.crypto` package.
### Learn About Mercurial
To become an efficient JDK developer, it is recommended that you invest in
learning Mercurial properly. Here are some links that can get you started:
* [Mercurial for git users](http://www.mercurial-scm.org/wiki/GitConcepts)
* [The official Mercurial tutorial](http://www.mercurial-scm.org/wiki/Tutorial)
* [hg init](http://hginit.com/)
* [Mercurial: The Definitive Guide](http://hgbook.red-bean.com/read/)
## Understanding the Build System
This section will give you a more technical description on the details of the
View File
@@ -1,343 +0,0 @@
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" lang="" xml:lang="">
<head>
<meta charset="utf-8" />
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
<title>HotSpot Coding Style</title>
<style type="text/css">
code{white-space: pre-wrap;}
span.smallcaps{font-variant: small-caps;}
span.underline{text-decoration: underline;}
div.column{display: inline-block; vertical-align: top; width: 50%;}
</style>
<link rel="stylesheet" href="../make/data/docs-resources/resources/jdk-default.css" />
<!--[if lt IE 9]>
<script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.3/html5shiv-printshiv.min.js"></script>
<![endif]-->
</head>
<body>
<header id="title-block-header">
<h1 class="title">HotSpot Coding Style</h1>
</header>
<nav id="TOC">
<ul>
<li><a href="#introduction">Introduction</a><ul>
<li><a href="#why-care-about-style">Why Care About Style?</a></li>
<li><a href="#counterexamples-and-updates">Counterexamples and Updates</a></li>
</ul></li>
<li><a href="#structure-and-formatting">Structure and Formatting</a><ul>
<li><a href="#factoring-and-class-design">Factoring and Class Design</a></li>
<li><a href="#source-files">Source Files</a></li>
<li><a href="#jtreg-tests">JTReg Tests</a></li>
<li><a href="#naming">Naming</a></li>
<li><a href="#commenting">Commenting</a></li>
<li><a href="#macros">Macros</a></li>
<li><a href="#whitespace">Whitespace</a></li>
<li><a href="#miscellaneous">Miscellaneous</a></li>
</ul></li>
<li><a href="#use-of-c-features">Use of C++ Features</a><ul>
<li><a href="#error-handling">Error Handling</a></li>
<li><a href="#rtti-runtime-type-information">RTTI (Runtime Type Information)</a></li>
<li><a href="#memory-allocation">Memory Allocation</a></li>
<li><a href="#class-inheritance">Class Inheritance</a></li>
<li><a href="#namespaces">Namespaces</a></li>
<li><a href="#c-standard-library">C++ Standard Library</a></li>
<li><a href="#type-deduction">Type Deduction</a></li>
<li><a href="#expression-sfinae">Expression SFINAE</a></li>
<li><a href="#enum">enum</a></li>
<li><a href="#thread_local">thread_local</a></li>
<li><a href="#nullptr">nullptr</a></li>
<li><a href="#atomic">&lt;atomic&gt;</a></li>
<li><a href="#uniform-initialization">Uniform Initialization</a></li>
<li><a href="#additional-permitted-features">Additional Permitted Features</a></li>
<li><a href="#excluded-features">Excluded Features</a></li>
<li><a href="#undecided-features">Undecided Features</a></li>
</ul></li>
</ul>
</nav>
<h2 id="introduction">Introduction</h2>
<p>This is a collection of rules, guidelines, and suggestions for writing HotSpot code. Following these will help new code fit in with existing HotSpot code, making it easier to read and maintain. Failure to follow these guidelines may lead to discussion during code reviews, if not outright rejection of a change.</p>
<h3 id="why-care-about-style">Why Care About Style?</h3>
<p>Some programmers seem to have lexers and even C preprocessors installed directly behind their eyeballs. The rest of us require code that is not only functionally correct but also easy to read. More than that, since there is no one style for easy-to-read code, and since a mashup of many styles is just as confusing as no style at all, it is important for coders to be conscious of the many implicit stylistic choices that historically have gone into the HotSpot code base.</p>
<p>Some of these guidelines are driven by the cross-platform requirements for HotSpot. Shared code must work on a variety of platforms, and may encounter deficiencies in some. Using platform conditionalization in shared code is usually avoided, while shared code is strongly preferred to multiple platform-dependent implementations, so some language features may be recommended against.</p>
<p>Some of the guidelines here are relatively arbitrary choices among equally plausible alternatives. The purpose of stating and enforcing these rules is largely to provide a consistent look to the code. That consistency makes the code more readable by avoiding non-functional distractions from the interesting functionality.</p>
<p>When changing pre-existing code, it is reasonable to adjust it to match these conventions. Exception: If the pre-existing code clearly conforms locally to its own peculiar conventions, it is not worth reformatting the whole thing. Also consider separating changes that make extensive stylistic updates from those which make functional changes.</p>
<h3 id="counterexamples-and-updates">Counterexamples and Updates</h3>
<p>Many of the guidelines mentioned here have (sometimes widespread) counterexamples in the HotSpot code base. Finding a counterexample is not sufficient justification for new code to follow the counterexample as a precedent, since readers of your code will rightfully expect your code to follow the greater bulk of precedents documented here.</p>
<p>Occasionally a guideline mentioned here may be just out of synch with the actual HotSpot code base. If you find that a guideline is consistently contradicted by a large number of counterexamples, please bring it up for discussion and possible change. The architectural rule, of course, is &quot;When in Rome do as the Romans&quot;. Sometimes in the suburbs of Rome the rules are a little different; these differences can be pointed out here.</p>
<p>Proposed changes should be discussed on the <a href="mailto:hotspot-dev@openjdk.java.net">HotSpot Developers</a> mailing list, and approved by <a href="https://en.wikipedia.org/wiki/Rough_consensus">rough consensus</a> of the <a href="https://openjdk.java.net/census#hotspot">HotSpot Group</a> Members. The Group Lead determines whether consensus has been reached. Changes are likely to be cautious and incremental, since HotSpot coders have been using these guidelines for years.</p>
<h2 id="structure-and-formatting">Structure and Formatting</h2>
<h3 id="factoring-and-class-design">Factoring and Class Design</h3>
<ul>
<li><p>Group related code together, so readers can concentrate on one section of one file.</p></li>
<li><p>Classes are the primary code structuring mechanism. Place related functionality in a class, or a set of related classes. Use of either namespaces or public non-member functions is rare in HotSpot code. Static non-member functions are not uncommon.</p></li>
<li><p>If a class <code>FooBar</code> is going to be used in more than one place, put it in a file named fooBar.hpp and fooBar.cpp. If the class is a sidekick to a more important class <code>BazBat</code>, it can go in bazBat.hpp.</p></li>
<li><p>Put a member function <code>FooBar::bang</code> into the same file that defined <code>FooBar</code>, or its associated *.inline.hpp or *.cpp file.</p></li>
<li><p>Use public accessor functions for member variables accessed outside the class.</p></li>
<li><p>Assign names to constant literals and use the names instead.</p></li>
<li><p>Keep functions small, a screenful at most. Split out chunks of logic into file-local classes or static functions if needed.</p></li>
<li><p>Factor away nonessential complexity into local inline helper functions and helper classes.</p></li>
<li><p>Think clearly about internal invariants that apply to each class, and document them in the form of asserts within member functions.</p></li>
<li><p>Make simple, self-evident contracts for member functions. If you cannot communicate a simple contract, redesign the class.</p></li>
<li><p>Implement classes as if expecting rough usage by clients. Check for incorrect usage of a class using <code>assert(...)</code>, <code>guarantee(...)</code>, <code>ShouldNotReachHere()</code> and comments wherever needed. Performance is almost never a reason to omit asserts.</p></li>
<li><p>When possible, design as if for reusability. This forces a clear design of the class's externals, and clean hiding of its internals.</p></li>
<li><p>Initialize all variables and data structures to a known state. If a class has a constructor, initialize it there.</p></li>
<li><p>Do no optimization before its time. Prove the need to optimize.</p></li>
<li><p>When you must defactor to optimize, preserve as much structure as possible. If you must hand-inline some name, label the local copy with the original name.</p></li>
<li><p>If you need to use a hidden detail (e.g., a structure offset), name it (as a constant or function) in the class that owns it.</p></li>
<li><p>Don't use the Copy and Paste keys to replicate more than a couple lines of code. Name what you must repeat.</p></li>
<li><p>If a class needs a member function to change a user-visible attribute, the change should be done with a &quot;setter&quot; accessor matched to the simple &quot;getter&quot;.</p></li>
</ul>
<h3 id="source-files">Source Files</h3>
<ul>
<li><p>All source files must have a globally unique basename. The build system depends on this uniqueness.</p></li>
<li><p>Do not put non-trivial function implementations in .hpp files. If the implementation depends on other .hpp files, put it in a .cpp or a .inline.hpp file.</p></li>
<li><p>.inline.hpp files should only be included in .cpp or .inline.hpp files.</p></li>
<li><p>All .inline.hpp files should include their corresponding .hpp file as the first include line. Declarations needed by other files should be put in the .hpp file, and not in the .inline.hpp file. This rule exists to resolve problems with circular dependencies between .inline.hpp files.</p></li>
<li><p>All .cpp files include precompiled.hpp as the first include line.</p></li>
<li><p>precompiled.hpp is just a build time optimization, so don't rely on it to resolve include problems.</p></li>
<li><p>Keep the include lines alphabetically sorted.</p></li>
<li><p>Put conditional inclusions (<code>#if ...</code>) at the end of the include list.</p></li>
</ul>
<h3 id="jtreg-tests">JTReg Tests</h3>
<ul>
<li><p>JTReg tests should have meaningful names.</p></li>
<li><p>JTReg tests associated with specific bugs should be tagged with the <code>@bug</code> keyword in the test description.</p></li>
<li><p>JTReg tests should be organized by component or feature under <code>test/</code>, in a directory hierarchy that generally follows that of the <code>src/</code> directory. There may be additional subdirectories to further categorize tests by feature. This structure makes it easy to run a collection of tests associated with a specific feature by specifying the associated directory as the source of the tests to run.</p>
<ul>
<li>Some (older) tests use the associated bug number in the directory name, the test name, or both. That naming style should no longer be used, with existing tests using that style being candidates for migration.</li>
</ul></li>
</ul>
<h3 id="naming">Naming</h3>
<ul>
<li><p>The length of a name may be correlated to the size of its scope. In particular, short names (even single letter names) may be fine in a small scope, but are usually inappropriate for larger scopes.</p></li>
<li><p>Prefer whole words rather than abbreviations, unless the abbreviation is more widely used than the long form in the code's domain.</p></li>
<li><p>Choose names consistently. Do not introduce spurious variations. Abbreviate corresponding terms to a consistent length.</p></li>
<li><p>Global names must be unique, to avoid <a href="https://en.cppreference.com/w/cpp/language/definition" title="One Definition Rule">One Definition Rule</a> (ODR) violations. A common prefixing scheme for related global names is often used. (This is instead of using namespaces, which are mostly avoided in HotSpot.)</p></li>
<li><p>Don't give two names to the semantically same thing. But use different names for semantically different things, even if they are representationally the same. (So use meaningful <code>typedef</code> or template alias names where appropriate.)</p></li>
<li><p>When choosing names, avoid categorical nouns like &quot;variable&quot;, &quot;field&quot;, &quot;parameter&quot;, &quot;value&quot;, and verbs like &quot;compute&quot;, &quot;get&quot;. (<code>storeValue(int param)</code> is bad.)</p></li>
<li><p>Type names and global names should use mixed-case with the first letter of each word capitalized (<code>FooBar</code>).</p></li>
<li><p>Embedded abbreviations in otherwise mixed-case names are usually capitalized entirely rather than being treated as a single word with only the initial letter capitalized, e.g. &quot;HTML&quot; rather than &quot;Html&quot;.</p></li>
<li><p>Function and local variable names use lowercase with words separated by a single underscore (<code>foo_bar</code>).</p></li>
<li><p>Class data member names have a leading underscore, and use lowercase with words separated by a single underscore (<code>_foo_bar</code>).</p></li>
<li><p>Constant names may be upper-case or mixed-case, according to historical necessity. (Note: There are many examples of constants with lowercase names.)</p></li>
<li><p>Constant names should follow an existing pattern, and must have a distinct appearance from other names in related APIs.</p></li>
<li><p>Class and type names should be noun phrases. Consider an &quot;er&quot; suffix for a class that represents an action.</p></li>
<li><p>Function names should be verb phrases that reflect changes of state known to a class's user, or else noun phrases if they cause no change of state visible to the class's user.</p></li>
<li><p>Getter accessor names are noun phrases, with no &quot;<code>get_</code>&quot; noise word. Boolean getters can also begin with &quot;<code>is_</code>&quot; or &quot;<code>has_</code>&quot;. Member function for reading data members usually have the same name as the data member, exclusive of the leading underscore.</p></li>
<li><p>Setter accessor names prepend &quot;<code>set_</code>&quot; to the getter name.</p></li>
<li><p>Other member function names are verb phrases, as if commands to the receiver.</p></li>
<li><p>Avoid leading underscores (as &quot;<code>_oop</code>&quot;) except in cases required above. (Names with leading underscores can cause portability problems.)</p></li>
</ul>
<h3 id="commenting">Commenting</h3>
<ul>
<li><p>Clearly comment subtle fixes.</p></li>
<li><p>Clearly comment tricky classes and functions.</p></li>
<li><p>If you have to choose between commenting code and writing wiki content, comment the code. Link from the wiki to the source file if it makes sense.</p></li>
<li><p>As a general rule don't add bug numbers to comments (they would soon overwhelm the code). But if the bug report contains significant information that can't reasonably be added as a comment, then refer to the bug report.</p></li>
<li><p>Personal names are discouraged in the source code, which is a team product.</p></li>
</ul>
<h3 id="macros">Macros</h3>
<ul>
<li><p>You can almost always use an inline function or class instead of a macro. Use a macro only when you really need it.</p></li>
<li><p>Templates may be preferable to multi-line macros. (There may be subtle performance effects with templates on some platforms; revert to macros if absolutely necessary.)</p></li>
<li><p><code>#ifdef</code>s should not be used to introduce platform-specific code into shared code (except for <code>_LP64</code>). They must be used to manage header files, in the pattern found at the top of every source file. They should be used mainly for major build features, including <code>PRODUCT</code>, <code>ASSERT</code>, <code>_LP64</code>, <code>INCLUDE_SERIALGC</code>, <code>COMPILER1</code>, etc.</p></li>
<li><p>For build features such as <code>PRODUCT</code>, use <code>#ifdef PRODUCT</code> for multiple-line inclusions or exclusions.</p></li>
<li><p>For short inclusions or exclusions based on build features, use macros like <code>PRODUCT_ONLY</code> and <code>NOT_PRODUCT</code>. But avoid using them with multiple-line arguments, since debuggers do not handle that well.</p></li>
<li><p>Use <code>CATCH</code>, <code>THROW</code>, etc. for HotSpot-specific exception processing.</p></li>
</ul>
<h3 id="whitespace">Whitespace</h3>
<ul>
<li><p>In general, don't change whitespace unless it improves readability or consistency. Gratuitous whitespace changes will make integrations and backports more difficult.</p></li>
<li><p>Use One-True-Brace-Style. The opening brace for a function or class is normally at the end of the line; it is sometimes moved to the beginning of the next line for emphasis. Substatements are enclosed in braces, even if there is only a single statement. Extremely simple one-line statements may drop braces around a substatement.</p></li>
<li><p>Indentation levels are two columns.</p></li>
<li><p>There is no hard line length limit. That said, bear in mind that excessively long lines can cause difficulties. Some people like to have multiple side-by-side windows in their editors, and long lines may force them to choose among unpleasant options. They can use wide windows, reducing the number that can fit across the screen, and wasting a lot of screen real estate because most lines are not that long. Alternatively, they can have more windows across the screen, with long lines wrapping (or worse, requiring scrolling to see in their entirety), which is harder to read. Similar issues exist for side-by-side code reviews.</p></li>
<li><p>Tabs are not allowed in code. Set your editor accordingly.<br> (Emacs: <code>(setq-default indent-tabs-mode nil)</code>.)</p></li>
<li><p>Use good taste to break lines and align corresponding tokens on adjacent lines.</p></li>
<li><p>Use spaces around operators, especially comparisons and assignments. (Relaxable for boolean expressions and high-precedence operators in classic math-style formulas.)</p></li>
<li><p>Put spaces on both sides of control flow keywords <code>if</code>, <code>else</code>, <code>for</code>, <code>switch</code>, etc. Don't add spaces around the associated <em>control</em> expressions. Examples:</p>
<pre><code>while (test_foo(args...)) { // Yes
while(test_foo(args...)) { // No, missing space after while
while ( test_foo(args...) ) { // No, excess spaces around control</code></pre></li>
<li><p>Use extra parentheses in expressions whenever operator precedence seems doubtful. Always use parentheses in shift/mask expressions (<code>&lt;&lt;</code>, <code>&amp;</code>, <code>|</code>). Don't add whitespace immediately inside parentheses.</p></li>
<li><p>Use more spaces and blank lines between larger constructs, such as classes or function definitions.</p></li>
<li><p>If the surrounding code has any sort of vertical organization, adjust new lines horizontally to be consistent with that organization. (E.g., trailing backslashes on long macro definitions often align.)</p></li>
</ul>
<h3 id="miscellaneous">Miscellaneous</h3>
<ul>
<li><p>Use the <a href="https://en.cppreference.com/w/cpp/language/raii" title="Resource Acquisition Is Initialization">Resource Acquisition Is Initialization</a> (RAII) design pattern to manage bracketed critical sections. See class <code>ResourceMark</code> for an example.</p></li>
<li>Avoid implicit conversions to <code>bool</code>.
<ul>
<li>Use <code>bool</code> for boolean values.</li>
<li>Do not use ints or pointers as (implicit) booleans with <code>&amp;&amp;</code>, <code>||</code>, <code>if</code>, <code>while</code>. Instead, compare explicitly, i.e. <code>if (x != 0)</code> or <code>if (ptr != nullptr)</code>, etc.</li>
<li>Do not use declarations in <em>condition</em> forms, i.e. don't use <code>if (T v = value) { ... }</code>.</li>
</ul></li>
<li><p>Use functions from globalDefinitions.hpp and related files when performing bitwise operations on integers. Do not code directly as C operators, unless they are extremely simple. (Examples: <code>align_up</code>, <code>is_power_of_2</code>, <code>exact_log2</code>.)</p></li>
<li><p>Use arrays with abstractions supporting range checks.</p></li>
<li><p>Always enumerate all cases in a switch statement or provide a default case. It is ok to have an empty default with comment.</p></li>
</ul>
<h2 id="use-of-c-features">Use of C++ Features</h2>
<p>HotSpot was originally written in a subset of the C++98/03 language. More recently, support for C++14 is provided, though again, HotSpot only uses a subset. (Backports to JDK versions lacking support for more recent Standards must of course stick with the original C++98/03 subset.)</p>
<p>This section describes that subset. Features from the C++98/03 language may be used unless explicitly excluded here. Features from C++11 and C++14 may be explicitly permitted or explicitly excluded, and discussed accordingly here. There is a third category, undecided features, about which HotSpot developers have not yet reached a consensus, or perhaps have not discussed at all. Use of these features is also excluded.</p>
<p>(The use of some features may not be immediately obvious and may slip in anyway, since the compiler will accept them. The code review process is the main defense against this.)</p>
<p>Some features are discussed in their own subsection, typically to provide more extensive discussion or rationale for limitations. Features that don't have their own subsection are listed in omnibus feature sections for permitted, excluded, and undecided features.</p>
<p>Lists of new features for C++11 and C++14, along with links to their descriptions, can be found in the online documentation for some of the compilers and libraries. The C++14 Standard is the definitive description.</p>
<ul>
<li><a href="https://gcc.gnu.org/projects/cxx-status.html">C++ Standards Support in GCC</a></li>
<li><a href="https://clang.llvm.org/cxx_status.html">C++ Support in Clang</a></li>
<li><a href="https://docs.microsoft.com/en-us/cpp/visual-cpp-language-conformance">Visual C++ Language Conformance</a></li>
<li><a href="https://gcc.gnu.org/onlinedocs/libstdc++/manual/status.html">libstdc++ Status</a></li>
<li><a href="https://libcxx.llvm.org/cxx1y_status.html">libc++ Status</a></li>
</ul>
<p>As a rule of thumb, permitting features which simplify writing code and, especially, reading code, is encouraged.</p>
<p>Similar discussions for some other projects:</p>
<ul>
<li><p><a href="https://google.github.io/styleguide/cppguide.html">Google C++ Style Guide</a> — Currently (2020) targeting C++17.</p></li>
<li><p><a href="https://chromium-cpp.appspot.com">C++11 and C++14 use in Chromium</a> — Categorizes features as allowed, banned, or to be discussed.</p></li>
<li><p><a href="https://llvm.org/docs/CodingStandards.html">llvm Coding Standards</a> — Currently (2020) targeting C++14.</p></li>
<li><p><a href="https://firefox-source-docs.mozilla.org/code-quality/coding-style/using_cxx_in_firefox_code.html">Using C++ in Mozilla code</a> — C++17 support is required for recent versions (2020).</p></li>
</ul>
<h3 id="error-handling">Error Handling</h3>
<p>Do not use exceptions. Exceptions are disabled by the build configuration for some platforms.</p>
<p>Rationale: There is significant concern over the performance cost of exceptions and their usage model and implications for maintainable code. That's not just a matter of history that has been fixed; there remain questions and problems even today (2019). See, for example, <a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2018/p0709r0.pdf">Zero cost deterministic exceptions</a>. Because of this, HotSpot has always used a build configuration that disables exceptions where that is available. As a result, HotSpot code uses error handling mechanisms such as two-phase construction, factory functions, returning error codes, and immediate termination. Even if the cost of exceptions were not a concern, the existing body of code was not written with exception safety in mind. Making HotSpot exception safe would be a very large undertaking.</p>
<p>In addition to the usual alternatives to exceptions, HotSpot provides its own exception mechanism. This is based on a set of macros defined in utilities/exceptions.hpp.</p>
<h3 id="rtti-runtime-type-information">RTTI (Runtime Type Information)</h3>
<p>Do not use <a href="https://en.wikipedia.org/wiki/Run-time_type_information" title="Runtime Type Information">Runtime Type Information</a> (RTTI). <a href="https://en.wikipedia.org/wiki/Run-time_type_information" title="Runtime Type Information">RTTI</a> is disabled by the build configuration for some platforms. Among other things, this means <code>dynamic_cast</code> cannot be used.</p>
<p>Rationale: Other than to implement exceptions (which HotSpot doesn't use), most potential uses of <a href="https://en.wikipedia.org/wiki/Run-time_type_information" title="Runtime Type Information">RTTI</a> are better done via virtual functions. Some of the remainder can be replaced by bespoke mechanisms. The cost of the additional runtime data structures needed to support <a href="https://en.wikipedia.org/wiki/Run-time_type_information" title="Runtime Type Information">RTTI</a> are deemed not worthwhile, given the alternatives.</p>
<h3 id="memory-allocation">Memory Allocation</h3>
<p>Do not use the standard global allocation and deallocation functions (operator new and related functions). Use of these functions by HotSpot code is disabled for some platforms.</p>
<p>Rationale: HotSpot often uses &quot;resource&quot; or &quot;arena&quot; allocation. Even where heap allocation is used, the standard global functions are avoided in favor of wrappers around malloc and free that support the VM's Native Memory Tracking (NMT) feature.</p>
<p>Native memory allocation failures are often treated as non-recoverable. The place where &quot;out of memory&quot; is (first) detected may be an innocent bystander, unrelated to the actual culprit.</p>
<h3 id="class-inheritance">Class Inheritance</h3>
<p>Use public single inheritance.</p>
<p>Prefer composition rather than non-public inheritance.</p>
<p>Restrict inheritance to the &quot;is-a&quot; case; use composition rather than non-is-a related inheritance.</p>
<p>Avoid multiple inheritance. Never use virtual inheritance.</p>
<h3 id="namespaces">Namespaces</h3>
<p>Avoid using namespaces. HotSpot code normally uses &quot;all static&quot; classes rather than namespaces for grouping. An &quot;all static&quot; class is not instantiable, has only static members, and is normally derived (possibly indirectly) from the helper class <code>AllStatic</code>.</p>
<p>Benefits of using such classes include:</p>
<ul>
<li><p>Provides access control for members, which is unavailable with namespaces.</p></li>
<li><p>Avoids <a href="https://en.cppreference.com/w/cpp/language/adl" title="Argument Dependent Lookup">Argument Dependent Lookup</a> (ADL).</p></li>
<li><p>Closed for additional members. Namespaces allow names to be added in multiple contexts, making it harder to see the complete API.</p></li>
</ul>
<p>Namespaces should be used only in cases where one of those &quot;benefits&quot; is actually a hindrance.</p>
<p>In particular, don't use anonymous namespaces. They seem like they should be useful, and indeed have some real benefits for naming and generated code size on some platforms. Unfortunately, debuggers don't seem to like them at all.</p>
<p><a href="https://groups.google.com/forum/#!topic/mozilla.dev.platform/KsaG3lEEaRM" class="uri">https://groups.google.com/forum/#!topic/mozilla.dev.platform/KsaG3lEEaRM</a><br> Suggests Visual Studio debugger might not be able to refer to anonymous namespace symbols, so can't set breakpoints in them. Though the discussion seems to go back and forth on that.</p>
<p><a href="https://firefox-source-docs.mozilla.org/code-quality/coding-style/coding_style_cpp.html" class="uri">https://firefox-source-docs.mozilla.org/code-quality/coding-style/coding_style_cpp.html</a><br> Search for &quot;Anonymous namespaces&quot; Suggests preferring &quot;static&quot; to anonymous namespaces where applicable, because of poor debugger support for anonymous namespaces.</p>
<p><a href="https://sourceware.org/bugzilla/show_bug.cgi?id=16874" class="uri">https://sourceware.org/bugzilla/show_bug.cgi?id=16874</a><br> Bug for similar gdb problems.</p>
<h3 id="c-standard-library">C++ Standard Library</h3>
<p>Avoid using the C++ Standard Library.</p>
<p>Historically, HotSpot has mostly avoided use of the Standard Library.</p>
<p>(It used to be impossible to use most of it in shared code, because the build configuration for Solaris with Solaris Studio made all but a couple of pieces inaccessible. Support for header-only parts was added in mid-2017. Support for Solaris was removed in 2020.)</p>
<p>Some reasons for this include</p>
<ul>
<li><p>Exceptions. Perhaps the largest core issue with adopting the use of Standard Library facilities is exceptions. HotSpot does not use exceptions and, for platforms which allow doing so, builds with them turned off. Many Standard Library facilities implicitly or explicitly use exceptions.</p></li>
<li><p><code>assert</code>. An issue that is quickly encountered is the <code>assert</code> macro name collision (<a href="https://bugs.openjdk.java.net/browse/JDK-8007770">JDK-8007770</a>). Some mechanism for addressing this would be needed before much of the Standard Library could be used. (Not all Standard Library implementations use assert in header files, but some do.)</p></li>
<li><p>Memory allocation. HotSpot requires explicit control over where allocations occur. The C++98/03 <code>std::allocator</code> class is too limited to support our usage. (Changes in more recent Standards may remove this limitation.)</p></li>
<li><p>Implementation vagaries. Bugs, or simply different implementation choices, can lead to different behaviors among the various Standard Libraries we need to deal with.</p></li>
<li><p>Inconsistent naming conventions. HotSpot and the C++ Standard use different naming conventions. The coexistence of those different conventions might appear jarring and reduce readability.</p></li>
</ul>
<p>There are a few exceptions to this rule.</p>
<ul>
<li><code>#include &lt;new&gt;</code> to use placement <code>new</code>, <code>std::nothrow</code>, and <code>std::nothrow_t</code>.</li>
<li><code>#include &lt;limits&gt;</code> to use <code>std::numeric_limits</code>.</li>
<li><code>#include &lt;type_traits&gt;</code>.</li>
<li><code>#include &lt;cstddef&gt;</code> to use <code>std::nullptr_t</code>.</li>
</ul>
<p>TODO: Rather than directly #including (permitted) Standard Library headers, use a convention of #including wrapper headers (in some location like hotspot/shared/stdcpp). This provides a single place for dealing with issues we might have for any given header, esp. platform-specific issues.</p>
<h3 id="type-deduction">Type Deduction</h3>
<p>Use type deduction only if it makes the code clearer or safer. Do not use it merely to avoid the inconvenience of writing an explicit type, unless that type is itself difficult to write. An example of the latter is a function template return type that depends on template parameters in a non-trivial way.</p>
<p>There are several contexts where types are deduced.</p>
<ul>
<li><p>Function argument deduction. This is always permitted, and indeed encouraged. It is nearly always better to allow the type of a function template argument to be deduced rather than explicitly specified.</p></li>
<li><p><code>auto</code> variable declarations (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1984.pdf">n1984</a>)<br> For local variables, this can be used to make the code clearer by eliminating type information that is obvious or irrelevant. Excessive use can make code much harder to understand.</p></li>
<li><p>Function return type deduction (<a href="https://isocpp.org/files/papers/N3638.html">n3638</a>)<br> Only use if the function body has a very small number of <code>return</code> statements, and generally relatively little other code.</p></li>
<li><p>Generic lambdas. Lambdas are not (yet) permitted.</p></li>
<li><p>Lambda init captures. Lambdas are not (yet) permitted.</p></li>
</ul>
<h3 id="expression-sfinae">Expression SFINAE</h3>
<p><a href="https://en.cppreference.com/w/cpp/language/sfinae" title="Substitution Failure Is Not An Error">Substitution Failure Is Not An Error</a> (SFINAE) is a template metaprogramming technique that makes use of template parameter substitution failures to make compile-time decisions.</p>
<p>C++11 relaxed the rules for what constitutes a hard-error when attempting to substitute template parameters with template arguments, making most deduction errors be substitution errors; see (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2634.html">n2634</a>). This makes <a href="https://en.cppreference.com/w/cpp/language/sfinae" title="Substitution Failure Is Not An Error">SFINAE</a> more powerful and easier to use. However, the implementation complexity for this change is significant, and this seems to be a place where obscure corner-case bugs in various compilers can be found. So while this feature can (and indeed should) be used (and would be difficult to avoid), caution should be used when pushing to extremes.</p>
<p>Here are a few closely related example bugs:<br> <a href="https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95468" class="uri">https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95468</a><br> <a href="https://developercommunity.visualstudio.com/content/problem/396562/sizeof-deduced-type-is-sometimes-not-a-constant-ex.html" class="uri">https://developercommunity.visualstudio.com/content/problem/396562/sizeof-deduced-type-is-sometimes-not-a-constant-ex.html</a></p>
<h3 id="enum">enum</h3>
<p>Where appropriate, <em>scoped-enums</em> should be used. (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2347.pdf">n2347</a>)</p>
<p>Use of <em>unscoped-enums</em> is permitted, though ordinary constants may be preferable when the automatic initializer feature isn't used.</p>
<p>The underlying type (the <em>enum-base</em>) of an unscoped enum type should always be specified explicitly. When unspecified, the underlying type is dependent on the range of the enumerator values and the platform.</p>
<p>The underlying type of a <em>scoped-enum</em> should also be specified explicitly if conversions may be applied to values of that type.</p>
<p>Due to bugs in certain (very old) compilers, there is widespread use of enums and avoidance of in-class initialization of static integral constant members. Compilers having such bugs are no longer supported. Except where an enum is semantically appropriate, new code should use integral constants.</p>
<h3 id="thread_local">thread_local</h3>
<p>Do not use <code>thread_local</code> (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2659.htm">n2659</a>); instead, use the HotSpot macro <code>THREAD_LOCAL</code>. The initializer must be a constant expression.</p>
<p>As was discussed in the review for <a href="https://mail.openjdk.java.net/pipermail/hotspot-dev/2019-September/039487.html">JDK-8230877</a>, <code>thread_local</code> allows dynamic initialization and destruction semantics. However, that support requires a run-time penalty for references to non-function-local <code>thread_local</code> variables defined in a different translation unit, even if they don't need dynamic initialization. Dynamic initialization and destruction of namespace-scoped thread local variables also has the same ordering problems as for ordinary namespace-scoped variables.</p>
<h3 id="nullptr">nullptr</h3>
<p>Prefer <code>nullptr</code> (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2431.pdf">n2431</a>) to <code>NULL</code>. Don't use (constexpr or literal) 0 for pointers.</p>
<p>For historical reasons there are widespread uses of both <code>NULL</code> and of integer 0 as a pointer value.</p>
<h3 id="atomic">&lt;atomic&gt;</h3>
<p>Do not use facilities provided by the <code>&lt;atomic&gt;</code> header (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html">n2427</a>), (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2752.htm">n2752</a>); instead, use the HotSpot <code>Atomic</code> class and related facilities.</p>
<p>Atomic operations in HotSpot code must have semantics which are consistent with those provided by the JDK's compilers for Java. There are platform-specific implementation choices that a C++ compiler might make or change that are outside the scope of the C++ Standard, and might differ from what the Java compilers implement.</p>
<p>In addition, HotSpot <code>Atomic</code> has a concept of &quot;conservative&quot; memory ordering, which may differ from (may be stronger than) sequentially consistent. There are algorithms in HotSpot that are believed to rely on that ordering.</p>
<h3 id="uniform-initialization">Uniform Initialization</h3>
<p>The use of <em>uniform initialization</em> (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2672.htm">n2672</a>), also known as <em>brace initialization</em>, is permitted.</p>
<p>Some relevant sections from cppreference.com:</p>
<ul>
<li><a href="https://en.cppreference.com/w/cpp/language/initialization">initialization</a></li>
<li><a href="https://en.cppreference.com/w/cpp/language/value_initialization">value initialization</a></li>
<li><a href="https://en.cppreference.com/w/cpp/language/direct_initialization">direct initialization</a></li>
<li><a href="https://en.cppreference.com/w/cpp/language/list_initialization">list initialization</a></li>
<li><a href="https://en.cppreference.com/w/cpp/language/aggregate_initialization">aggregate initialization</a></li>
</ul>
<p>Although related, the use of <code>std::initializer_list</code> remains forbidden, as part of the avoidance of the C++ Standard Library in HotSpot code.</p>
<h3 id="additional-permitted-features">Additional Permitted Features</h3>
<ul>
<li><p><code>constexpr</code> (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2235.pdf">n2235</a>) (<a href="https://isocpp.org/files/papers/N3652.html">n3652</a>)</p></li>
<li><p>Sized deallocation (<a href="https://isocpp.org/files/papers/n3778.html">n3778</a>)</p></li>
<li><p>Variadic templates (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2242.pdf">n2242</a>) (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2555.pdf">n2555</a>)</p></li>
<li><p>Static assertions (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1720.html">n1720</a>)</p></li>
<li><p><code>decltype</code> (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2343.pdf">n2343</a>) (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3276.pdf">n3276</a>)</p></li>
<li><p>Right angle brackets (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1757.html">n1757</a>)</p></li>
<li><p>Default template arguments for function templates (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#226">CWG D226</a>)</p></li>
<li><p>Template aliases (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2258.pdf">n2258</a>)</p></li>
<li><p>Delegating constructors (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1986.pdf">n1986</a>)</p></li>
<li><p>Explicit conversion operators (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2437.pdf">n2437</a>)</p></li>
<li><p>Standard Layout Types (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2342.htm">n2342</a>)</p></li>
<li><p>Defaulted and deleted functions (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2346.htm">n2346</a>)</p></li>
<li><p>Dynamic initialization and destruction with concurrency (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2660.htm">n2660</a>)</p></li>
<li><p><code>final</code> virtual specifiers for classes and virtual functions (<a href="http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2928.htm">n2928</a>), (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3206.htm">n3206</a>), (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3272.htm">n3272</a>)</p></li>
<li><p><code>override</code> virtual specifiers for virtual functions (<a href="http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2928.htm">n2928</a>), (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3206.htm">n3206</a>), (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3272.htm">n3272</a>)</p></li>
<li><p>Local and unnamed types as template parameters (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2657.htm">n2657</a>)</p></li>
<li><p>Range-based <code>for</code> loops (<a href="http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2930.html">n2930</a>) (<a href="https://en.cppreference.com/w/cpp/language/range-for">range-for</a>)</p></li>
</ul>
<h3 id="excluded-features">Excluded Features</h3>
<ul>
<li>New string and character literals
<ul>
<li>New character types (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2249.html">n2249</a>)</li>
<li>Unicode string literals (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm">n2442</a>)</li>
<li>Raw string literals (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm">n2442</a>)</li>
<li>Universal character name literals (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2170.html">n2170</a>)</li>
</ul>
<p>HotSpot doesn't need any of the new character and string literal types.</p></li>
<li><p>User-defined literals (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2765.pdf">n2765</a>) — User-defined literals should not be added casually, but only through a proposal to add a specific UDL.</p></li>
<li><p>Inline namespaces (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2535.htm">n2535</a>) — HotSpot makes very limited use of namespaces.</p></li>
<li><p><code>using namespace</code> directives. In particular, don't use <code>using namespace std;</code> to avoid needing to qualify Standard Library names.</p></li>
<li><p>Propagating exceptions (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2179.html">n2179</a>) — HotSpot does not permit the use of exceptions, so this feature isn't useful.</p></li>
<li><p>Avoid namespace-scoped variables with non-constexpr initialization. In particular, avoid variables with types requiring non-trivial initialization or destruction. Initialization order problems can be difficult to deal with and lead to surprises, as can destruction ordering. HotSpot doesn't generally try to cleanup on exit, and running destructors at exit can also lead to problems.</p></li>
<li><p><code>[[deprecated]]</code> attribute (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3760.html">n3760</a>) — Not relevant in HotSpot code.</p></li>
<li><p>Avoid most operator overloading, preferring named functions. When operator overloading is used, ensure the semantics conform to the normal expected behavior of the operation.</p></li>
<li><p>Avoid most implicit conversion constructors and (implicit or explicit) conversion operators. (Note that conversion to <code>bool</code> isn't needed in HotSpot code because of the &quot;no implicit boolean&quot; guideline.)</p></li>
<li><p>Avoid covariant return types.</p></li>
<li><p>Avoid <code>goto</code> statements.</p></li>
</ul>
<h3 id="undecided-features">Undecided Features</h3>
<p>This list is incomplete; it serves to explicitly call out some features that have not yet been discussed.</p>
<ul>
<li><p>Trailing return type syntax for functions (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2541.htm">n2541</a>)</p></li>
<li><p>Variable templates (<a href="https://isocpp.org/files/papers/N3651.pdf">n3651</a>)</p></li>
<li><p>Member initializers and aggregates (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3653.html">n3653</a>)</p></li>
<li><p><code>[[noreturn]]</code> attribute (<a href="http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2761.pdf">n2761</a>)</p></li>
<li><p>Rvalue references and move semantics</p></li>
<li><p>Lambdas</p></li>
</ul>
</body>
</html>

View File

@@ -1,856 +0,0 @@
% HotSpot Coding Style
## Introduction
This is a collection of rules, guidelines, and suggestions for writing
HotSpot code. Following these will help new code fit in with existing
HotSpot code, making it easier to read and maintain. Failure to
follow these guidelines may lead to discussion during code reviews, if
not outright rejection of a change.
### Why Care About Style?
Some programmers seem to have lexers and even C preprocessors
installed directly behind their eyeballs. The rest of us require code
that is not only functionally correct but also easy to read. More than
that, since there is no one style for easy-to-read code, and since a
mashup of many styles is just as confusing as no style at all, it is
important for coders to be conscious of the many implicit stylistic
choices that historically have gone into the HotSpot code base.
Some of these guidelines are driven by the cross-platform requirements
for HotSpot. Shared code must work on a variety of platforms, and may
encounter deficiencies in some. Using platform conditionalization in
shared code is usually avoided, while shared code is strongly
preferred to multiple platform-dependent implementations, so some
language features may be recommended against.
Some of the guidelines here are relatively arbitrary choices among
equally plausible alternatives. The purpose of stating and enforcing
these rules is largely to provide a consistent look to the code. That
consistency makes the code more readable by avoiding non-functional
distractions from the interesting functionality.
When changing pre-existing code, it is reasonable to adjust it to
match these conventions. Exception: If the pre-existing code clearly
conforms locally to its own peculiar conventions, it is not worth
reformatting the whole thing. Also consider separating changes that
make extensive stylistic updates from those which make functional
changes.
### Counterexamples and Updates
Many of the guidelines mentioned here have (sometimes widespread)
counterexamples in the HotSpot code base. Finding a counterexample is
not sufficient justification for new code to follow the counterexample
as a precedent, since readers of your code will rightfully expect your
code to follow the greater bulk of precedents documented here.
Occasionally a guideline mentioned here may be just out of synch with
the actual HotSpot code base. If you find that a guideline is
consistently contradicted by a large number of counterexamples, please
bring it up for discussion and possible change. The architectural
rule, of course, is "When in Rome do as the Romans". Sometimes in the
suburbs of Rome the rules are a little different; these differences
can be pointed out here.
Proposed changes should be discussed on the
[HotSpot Developers](mailto:hotspot-dev@openjdk.java.net) mailing
list, and approved by
[rough consensus](https://en.wikipedia.org/wiki/Rough_consensus) of
the [HotSpot Group](https://openjdk.java.net/census#hotspot) Members.
The Group Lead determines whether consensus has been reached.
Changes are likely to be cautious and incremental, since HotSpot
coders have been using these guidelines for years.
## Structure and Formatting
### Factoring and Class Design
* Group related code together, so readers can concentrate on one
section of one file.
* Classes are the primary code structuring mechanism. Place related
functionality in a class, or a set of related classes. Use of either
namespaces or public non-member functions is rare in HotSpot code.
Static non-member functions are not uncommon.
* If a class `FooBar` is going to be used in more than one place, put it
in files named fooBar.hpp and fooBar.cpp. If the class is a sidekick
to a more important class `BazBat`, it can go in bazBat.hpp.
* Put a member function `FooBar::bang` into the same file that defined
`FooBar`, or its associated *.inline.hpp or *.cpp file.
* Use public accessor functions for member variables accessed
outside the class.
* Assign names to constant literals and use the names instead.
* Keep functions small, a screenful at most. Split out chunks of
logic into file-local classes or static functions if needed.
* Factor away nonessential complexity into local inline helper
functions and helper classes.
* Think clearly about internal invariants that apply to each class,
and document them in the form of asserts within member functions.
* Make simple, self-evident contracts for member functions. If you cannot
communicate a simple contract, redesign the class.
* Implement classes as if expecting rough usage by clients. Check for
incorrect usage of a class using `assert(...)`, `guarantee(...)`,
`ShouldNotReachHere()` and comments wherever needed. Performance is
almost never a reason to omit asserts.
* When possible, design as if for reusability. This forces a clear
design of the class's externals, and clean hiding of its internals.
* Initialize all variables and data structures to a known state. If a
class has a constructor, initialize it there.
* Do no optimization before its time. Prove the need to optimize.
* When you must defactor to optimize, preserve as much structure as
possible. If you must hand-inline some name, label the local copy with
the original name.
* If you need to use a hidden detail (e.g., a structure offset), name
it (as a constant or function) in the class that owns it.
* Don't use the Copy and Paste keys to replicate more than a couple
lines of code. Name what you must repeat.
* If a class needs a member function to change a user-visible attribute, the
change should be done with a "setter" accessor matched to the simple
"getter".
### Source Files
* All source files must have a globally unique basename. The build
system depends on this uniqueness.
* Do not put non-trivial function implementations in .hpp files. If
the implementation depends on other .hpp files, put it in a .cpp or
a .inline.hpp file.
* .inline.hpp files should only be included in .cpp or .inline.hpp
files.
* All .inline.hpp files should include their corresponding .hpp file as
the first include line. Declarations needed by other files should be put
in the .hpp file, and not in the .inline.hpp file. This rule exists to
resolve problems with circular dependencies between .inline.hpp files.
* All .cpp files include precompiled.hpp as the first include line.
* precompiled.hpp is just a build time optimization, so don't rely on
it to resolve include problems.
* Keep the include lines alphabetically sorted.
* Put conditional inclusions (`#if ...`) at the end of the include list.
### JTReg Tests
* JTReg tests should have meaningful names.
* JTReg tests associated with specific bugs should be tagged with the
`@bug` keyword in the test description.
* JTReg tests should be organized by component or feature under
`test/`, in a directory hierarchy that generally follows that of the
`src/` directory. There may be additional subdirectories to further
categorize tests by feature. This structure makes it easy to run a
collection of tests associated with a specific feature by specifying
the associated directory as the source of the tests to run.
* Some (older) tests use the associated bug number in the directory
name, the test name, or both. That naming style should no longer be
used, with existing tests using that style being candidates for migration.
### Naming
* The length of a name may be correlated to the size of its scope. In
particular, short names (even single letter names) may be fine in a
small scope, but are usually inappropriate for larger scopes.
* Prefer whole words rather than abbreviations, unless the
abbreviation is more widely used than the long form in the code's
domain.
* Choose names consistently. Do not introduce spurious
variations. Abbreviate corresponding terms to a consistent length.
* Global names must be unique, to avoid [One Definition Rule][ODR] (ODR)
violations. A common prefixing scheme for related global names is
often used. (This is instead of using namespaces, which are mostly
avoided in HotSpot.)
* Don't give two names to the semantically same thing. But use
different names for semantically different things, even if they are
representationally the same. (So use meaningful `typedef` or template
alias names where appropriate.)
* When choosing names, avoid categorical nouns like "variable",
"field", "parameter", "value", and verbs like "compute", "get".
(`storeValue(int param)` is bad.)
* Type names and global names should use mixed-case with the first
letter of each word capitalized (`FooBar`).
* Embedded abbreviations in
otherwise mixed-case names are usually capitalized entirely rather
than being treated as a single word with only the initial letter
capitalized, e.g. "HTML" rather than "Html".
* Function and local variable names use lowercase with words separated
by a single underscore (`foo_bar`).
* Class data member names have a leading underscore, and use lowercase
with words separated by a single underscore (`_foo_bar`).
* Constant names may be upper-case or mixed-case, according to
historical necessity. (Note: There are many examples of constants
with lowercase names.)
* Constant names should follow an existing pattern, and must have a
distinct appearance from other names in related APIs.
* Class and type names should be noun phrases. Consider an "er" suffix
for a class that represents an action.
* Function names should be verb phrases that reflect changes of state
known to a class's user, or else noun phrases if they cause no change
of state visible to the class's user.
* Getter accessor names are noun phrases, with no "`get_`" noise
word. Boolean getters can also begin with "`is_`" or "`has_`". Member
functions for reading data members usually have the same name as the
data member, exclusive of the leading underscore.
* Setter accessor names prepend "`set_`" to the getter name. (A brief
sketch of these conventions follows this list.)
* Other member function names are verb phrases, as if commands to the receiver.
* Avoid leading underscores (as "`_oop`") except in cases required
above. (Names with leading underscores can cause portability
problems.)
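A minimal sketch illustrating several of these conventions (the class and
its members are hypothetical, for illustration only):
```
class RefCounter {                 // type name: mixed case
 private:
  int _ref_count;                  // data member: leading underscore

 public:
  int ref_count() const { return _ref_count; }            // getter: no "get_"
  bool has_references() const { return _ref_count > 0; }  // boolean getter
  void set_ref_count(int ref_count) { _ref_count = ref_count; }  // setter
};
```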
### Commenting
* Clearly comment subtle fixes.
* Clearly comment tricky classes and functions.
* If you have to choose between commenting code and writing wiki
content, comment the code. Link from the wiki to the source file if
it makes sense.
* As a general rule don't add bug numbers to comments (they would soon
overwhelm the code). But if the bug report contains significant
information that can't reasonably be added as a comment, then refer to
the bug report.
* Personal names are discouraged in the source code, which is a team
product.
### Macros
* You can almost always use an inline function or class instead of a
macro. Use a macro only when you really need it.
* Templates may be preferable to multi-line macros. (There may be
subtle performance effects with templates on some platforms; revert
to macros if absolutely necessary.)
* `#ifdef`s should not be used to introduce platform-specific code
into shared code (except for `_LP64`). They must be used to manage
header files, in the pattern found at the top of every source
file. They should be used mainly for major build features, including
`PRODUCT`, `ASSERT`, `_LP64`, `INCLUDE_SERIALGC`, `COMPILER1`, etc.
* For build features such as `PRODUCT`, use `#ifdef PRODUCT` for
multiple-line inclusions or exclusions.
* For short inclusions or exclusions based on build features, use
macros like `PRODUCT_ONLY` and `NOT_PRODUCT`. But avoid using them
with multiple-line arguments, since debuggers do not handle that
well.
* Use `CATCH`, `THROW`, etc. for HotSpot-specific exception processing.
### Whitespace
* In general, don't change whitespace unless it improves readability
or consistency. Gratuitous whitespace changes will make integrations
and backports more difficult.
* Use One-True-Brace-Style. The opening brace for a function or class
is normally at the end of the line; it is sometimes moved to the
beginning of the next line for emphasis. Substatements are enclosed
in braces, even if there is only a single statement. Extremely simple
one-line statements may drop braces around a substatement.
* Indentation levels are two columns.
* There is no hard line length limit. That said, bear in mind that
excessively long lines can cause difficulties. Some people like to
have multiple side-by-side windows in their editors, and long lines
may force them to choose among unpleasant options. They can use wide
windows, reducing the number that can fit across the screen, and
wasting a lot of screen real estate because most lines are not that
long. Alternatively, they can have more windows across the screen,
with long lines wrapping (or worse, requiring scrolling to see in
their entirety), which is harder to read. Similar issues exist for
side-by-side code reviews.
* Tabs are not allowed in code. Set your editor accordingly.<br>
(Emacs: `(setq-default indent-tabs-mode nil)`.)
* Use good taste to break lines and align corresponding tokens on
adjacent lines.
* Use spaces around operators, especially comparisons and
assignments. (Relaxable for boolean expressions and high-precedence
operators in classic math-style formulas.)
* Put spaces on both sides of control flow keywords `if`, `else`,
`for`, `switch`, etc. Don't add spaces around the associated
_control_ expressions. Examples:
```
while (test_foo(args...)) { // Yes
while(test_foo(args...)) { // No, missing space after while
while ( test_foo(args...) ) { // No, excess spaces around control
```
* Use extra parentheses in expressions whenever operator precedence
seems doubtful. Always use parentheses in shift/mask expressions
(`<<`, `&`, `|`). Don't add whitespace immediately inside
parentheses.
* Use more spaces and blank lines between larger constructs, such as
classes or function definitions.
* If the surrounding code has any sort of vertical organization,
adjust new lines horizontally to be consistent with that
organization. (E.g., trailing backslashes on long macro definitions
often align.)
### Miscellaneous
* Use the [Resource Acquisition Is Initialization][RAII] (RAII)
design pattern to manage bracketed critical
sections. See class `ResourceMark` for an example.
* Avoid implicit conversions to `bool`.
* Use `bool` for boolean values.
* Do not use ints or pointers as (implicit) booleans with `&&`, `||`,
`if`, `while`. Instead, compare explicitly, i.e. `if (x != 0)` or
`if (ptr != nullptr)`, etc.
* Do not use declarations in _condition_ forms, i.e. don't use
`if (T v = value) { ... }`.
* Use functions from globalDefinitions.hpp and related files when
performing bitwise
operations on integers. Do not code directly as C operators, unless
they are extremely simple. (Examples: `align_up`, `is_power_of_2`,
`exact_log2`.)
* Use arrays with abstractions supporting range checks.
* Always enumerate all cases in a switch statement or provide a default
case. It is ok to have an empty default with a comment.
## Use of C++ Features
HotSpot was originally written in a subset of the C++98/03 language.
More recently, support for C++14 is provided, though again,
HotSpot only uses a subset. (Backports to JDK versions lacking
support for more recent Standards must of course stick with the
original C++98/03 subset.)
This section describes that subset. Features from the C++98/03
language may be used unless explicitly excluded here. Features from
C++11 and C++14 may be explicitly permitted or explicitly excluded,
and discussed accordingly here. There is a third category, undecided
features, about which HotSpot developers have not yet reached a
consensus, or perhaps have not discussed at all. Use of these
features is also excluded.
(The use of some features may not be immediately obvious and may slip
in anyway, since the compiler will accept them. The code review
process is the main defense against this.)
Some features are discussed in their own subsection, typically to provide
more extensive discussion or rationale for limitations. Features that
don't have their own subsection are listed in omnibus feature sections
for permitted, excluded, and undecided features.
Lists of new features for C++11 and C++14, along with links to their
descriptions, can be found in the online documentation for some of the
compilers and libraries. The C++14 Standard is the definitive
description.
* [C++ Standards Support in GCC](https://gcc.gnu.org/projects/cxx-status.html)
* [C++ Support in Clang](https://clang.llvm.org/cxx_status.html)
* [Visual C++ Language Conformance](https://docs.microsoft.com/en-us/cpp/visual-cpp-language-conformance)
* [libstdc++ Status](https://gcc.gnu.org/onlinedocs/libstdc++/manual/status.html)
* [libc++ Status](https://libcxx.llvm.org/cxx1y_status.html)
As a rule of thumb, permitting features which simplify writing code
and, especially, reading code, is encouraged.
Similar discussions for some other projects:
* [Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html) &mdash;
Currently (2020) targeting C++17.
* [C++11 and C++14 use in Chromium](https://chromium-cpp.appspot.com) &mdash;
Categorizes features as allowed, banned, or to be discussed.
* [llvm Coding Standards](https://llvm.org/docs/CodingStandards.html) &mdash;
Currently (2020) targeting C++14.
* [Using C++ in Mozilla code](https://firefox-source-docs.mozilla.org/code-quality/coding-style/using_cxx_in_firefox_code.html) &mdash;
C++17 support is required for recent versions (2020).
### Error Handling
Do not use exceptions. Exceptions are disabled by the build configuration
for some platforms.
Rationale: There is significant concern over the performance cost of
exceptions and their usage model and implications for maintainable code.
That's not just a matter of history that has been fixed; there remain
questions and problems even today (2019). See, for example, [Zero cost
deterministic
exceptions](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2018/p0709r0.pdf).
Because of this, HotSpot has always used a build configuration that disables
exceptions where that is available. As a result, HotSpot code uses error
handling mechanisms such as two-phase construction, factory functions,
returning error codes, and immediate termination. Even if the cost of
exceptions were not a concern, the existing body of code was not written with
exception safety in mind. Making HotSpot exception safe would be a very large
undertaking.
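As a hedged illustration of that style (the class and function names are
hypothetical, not an existing HotSpot API), an initialization function can
report failure through its return value instead of throwing:
```
#include <stdlib.h>

// Hypothetical two-phase pattern: the constructor cannot fail, and
// initialize() reports failure via its return value.
class ScratchBuffer {
 private:
  char*  _data;
  size_t _size;

 public:
  ScratchBuffer() : _data(nullptr), _size(0) {}

  bool initialize(size_t size) {
    // Real HotSpot code would use its NMT-aware allocation wrappers here.
    _data = (char*) malloc(size);
    if (_data == nullptr) {
      return false;               // the caller checks for failure
    }
    _size = size;
    return true;
  }
};
```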
In addition to the usual alternatives to exceptions, HotSpot provides its
own exception mechanism. This is based on a set of macros defined in
utilities/exceptions.hpp.
### RTTI (Runtime Type Information)
Do not use [Runtime Type Information][RTTI] (RTTI).
[RTTI][] is disabled by the build configuration for some
platforms. Among other things, this means `dynamic_cast` cannot be used.
Rationale: Other than to implement exceptions (which HotSpot doesn't use),
most potential uses of [RTTI][] are better done via virtual functions. Some of
the remainder can be replaced by bespoke mechanisms. The cost of the
additional runtime data structures needed to support [RTTI][] are deemed not
worthwhile, given the alternatives.
### Memory Allocation
Do not use the standard global allocation and deallocation functions
(operator new and related functions). Use of these functions by HotSpot
code is disabled for some platforms.
Rationale: HotSpot often uses "resource" or "arena" allocation. Even
where heap allocation is used, the standard global functions are
avoided in favor of wrappers around malloc and free that support the
VM's Native Memory Tracking (NMT) feature.
Native memory allocation failures are often treated as non-recoverable.
The place where "out of memory" is (first) detected may be an innocent
bystander, unrelated to the actual culprit.
### Class Inheritance
Use public single inheritance.
Prefer composition rather than non-public inheritance.
Restrict inheritance to the "is-a" case; use composition rather than
non-is-a related inheritance.
Avoid multiple inheritance. Never use virtual inheritance.
### Namespaces
Avoid using namespaces. HotSpot code normally uses "all static"
classes rather than namespaces for grouping. An "all static" class is
not instantiable, has only static members, and is normally derived
(possibly indirectly) from the helper class `AllStatic`.
Benefits of using such classes include:
* Provides access control for members, which is unavailable with
namespaces.
* Avoids [Argument Dependent Lookup][ADL] (ADL).
* Closed for additional members. Namespaces allow names to be added in
multiple contexts, making it harder to see the complete API.
Namespaces should be used only in cases where one of those "benefits"
is actually a hindrance.
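For illustration, a minimal sketch of such an "all static" class (the name
and members are hypothetical):
```
// Not instantiable; used purely as a named grouping with access control.
class CompressionStats : public AllStatic {
 public:
  static void   record_sample(size_t in_bytes, size_t out_bytes);
  static double average_ratio();
};
```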
In particular, don't use anonymous namespaces. They seem like they should
be useful, and indeed have some real benefits for naming and generated code
size on some platforms. Unfortunately, debuggers don't seem to like them at
all.
<https://groups.google.com/forum/#!topic/mozilla.dev.platform/KsaG3lEEaRM><br>
Suggests Visual Studio debugger might not be able to refer to
anonymous namespace symbols, so can't set breakpoints in them.
Though the discussion seems to go back and forth on that.
<https://firefox-source-docs.mozilla.org/code-quality/coding-style/coding_style_cpp.html><br>
Search for "Anonymous namespaces"
Suggests preferring "static" to anonymous namespaces where applicable,
because of poor debugger support for anonymous namespaces.
<https://sourceware.org/bugzilla/show_bug.cgi?id=16874><br>
Bug for similar gdb problems.
### C++ Standard Library
Avoid using the C++ Standard Library.
Historically, HotSpot has mostly avoided use of the Standard
Library.
(It used to be impossible to use most of it in shared code,
because the build configuration for Solaris with Solaris Studio made
all but a couple of pieces inaccessible. Support for header-only
parts was added in mid-2017. Support for Solaris was removed
in 2020.)
Some reasons for this include
* Exceptions. Perhaps the largest core issue with adopting the use of
Standard Library facilities is exceptions. HotSpot does not use
exceptions and, for platforms which allow doing so, builds with them
turned off. Many Standard Library facilities implicitly or explicitly
use exceptions.
* `assert`. An issue that is quickly encountered is the `assert` macro name
collision ([JDK-8007770](https://bugs.openjdk.java.net/browse/JDK-8007770)).
Some mechanism for addressing this would be needed before much of the
Standard Library could be used. (Not all Standard Library implementations
use assert in header files, but some do.)
* Memory allocation. HotSpot requires explicit control over where
allocations occur. The C++98/03 `std::allocator` class is too limited
to support our usage. (Changes in more recent Standards may remove
this limitation.)
* Implementation vagaries. Bugs, or simply different implementation choices,
can lead to different behaviors among the various Standard Libraries we need
to deal with.
* Inconsistent naming conventions. HotSpot and the C++ Standard use
different naming conventions. The coexistence of those different conventions
might appear jarring and reduce readability.
There are a few exceptions to this rule.
* `#include <new>` to use placement `new`, `std::nothrow`, and `std::nothrow_t`.
* `#include <limits>` to use `std::numeric_limits`.
* `#include <type_traits>`.
* `#include <cstddef>` to use `std::nullptr_t`.
TODO: Rather than directly \#including (permitted) Standard Library
headers, use a convention of \#including wrapper headers (in some
location like hotspot/shared/stdcpp). This provides a single place
for dealing with issues we might have for any given header, esp.
platform-specific issues.
### Type Deduction
Use type deduction only if it makes the code clearer or safer. Do not
use it merely to avoid the inconvenience of writing an explicit type,
unless that type is itself difficult to write. An example of the
latter is a function template return type that depends on template
parameters in a non-trivial way.
There are several contexts where types are deduced.
* Function argument deduction. This is always permitted, and indeed
encouraged. It is nearly always better to allow the type of a
function template argument to be deduced rather than explicitly
specified.
* `auto` variable declarations
([n1984](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1984.pdf))<br>
For local variables, this can be used to make the code clearer by
eliminating type information that is obvious or irrelevant. Excessive
use can make code much harder to understand. (A brief sketch follows
this list.)
* Function return type deduction
([n3638](https://isocpp.org/files/papers/N3638.html))<br>
Only use if the function body has a very small number of `return`
statements, and generally relatively little other code.
* Generic lambdas. Lambdas are not (yet) permitted.
* Lambda init captures. Lambdas are not (yet) permitted.
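A hedged sketch of the `auto` guidance (the types and functions below are
hypothetical, for illustration only):
```
// Hypothetical helper type and factory.
class BufferPool {
 public:
  int free_count() const { return 0; }
};

static BufferPool _the_pool;
static BufferPool* make_buffer_pool() { return &_the_pool; }

void auto_example() {
  // Clearer: the type is evident from the initializer, so auto only removes noise.
  auto* pool = make_buffer_pool();

  // Less clear: spell out the type when the initializer does not reveal it;
  // prefer this over 'auto free_buffers = pool->free_count();'.
  int free_buffers = pool->free_count();
}
```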
### Expression SFINAE
[Substitution Failure Is Not An Error][SFINAE] (SFINAE)
is a template metaprogramming technique that makes use of
template parameter substitution failures to make compile-time decisions.
C++11 relaxed the rules for what constitutes a hard-error when
attempting to substitute template parameters with template arguments,
making most deduction errors be substitution errors; see
([n2634](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2634.html)).
This makes [SFINAE][] more powerful and easier to use. However, the
implementation complexity for this change is significant, and this
seems to be a place where obscure corner-case bugs in various
compilers can be found. So while this feature can (and indeed should)
be used (and would be difficult to avoid), caution should be used when
pushing to extremes.
Here are a few closely related example bugs:<br>
<https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95468><br>
<https://developercommunity.visualstudio.com/content/problem/396562/sizeof-deduced-type-is-sometimes-not-a-constant-ex.html>
### enum
Where appropriate, _scoped-enums_ should be used.
([n2347](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2347.pdf))
Use of _unscoped-enums_ is permitted, though ordinary constants may be
preferable when the automatic initializer feature isn't used.
The underlying type (the _enum-base_) of an unscoped enum type should
always be specified explicitly. When unspecified, the underlying type
is dependent on the range of the enumerator values and the platform.
The underlying type of a _scoped-enum_ should also be specified
explicitly if conversions may be applied to values of that type.
Due to bugs in certain (very old) compilers, there is widespread use
of enums and avoidance of in-class initialization of static integral
constant members. Compilers having such bugs are no longer supported.
Except where an enum is semantically appropriate, new code should use
integral constants.
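For example (a hedged sketch; the enum names are hypothetical):
```
#include <stdint.h>

// Scoped enum; underlying type given because values are converted to int.
enum class BarrierKind : int {
  none,
  pre_write,
  post_write
};

// Unscoped enum with an explicit enum-base.
enum RegionState : uint8_t {
  free_region = 0,
  eden_region,
  old_region
};
```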
### thread_local
Do not use `thread_local`
([n2659](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2659.htm));
instead, use the HotSpot macro `THREAD_LOCAL`. The initializer must
be a constant expression.
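A minimal sketch of the intended usage, assuming HotSpot's `THREAD_LOCAL`
macro is in scope (the variable is hypothetical):
```
// Per-thread counter; constant-expression initializer only.
static THREAD_LOCAL int _recursion_depth = 0;
```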
As was discussed in the review for
[JDK-8230877](https://mail.openjdk.java.net/pipermail/hotspot-dev/2019-September/039487.html),
`thread_local` allows dynamic initialization and destruction
semantics. However, that support requires a run-time penalty for
references to non-function-local `thread_local` variables defined in a
different translation unit, even if they don't need dynamic
initialization. Dynamic initialization and destruction of
namespace-scoped thread local variables also has the same ordering
problems as for ordinary namespace-scoped variables.
### nullptr
Prefer `nullptr`
([n2431](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2431.pdf))
to `NULL`. Don't use (constexpr or literal) 0 for pointers.
For historical reasons there are widespread uses of both `NULL` and of
integer 0 as a pointer value.
### &lt;atomic&gt;
Do not use facilities provided by the `<atomic>` header
([n2427](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html)),
([n2752](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2752.htm));
instead, use the HotSpot `Atomic` class and related facilities.
Atomic operations in HotSpot code must have semantics which are
consistent with those provided by the JDK's compilers for Java. There
are platform-specific implementation choices that a C++ compiler might
make or change that are outside the scope of the C++ Standard, and
might differ from what the Java compilers implement.
In addition, HotSpot `Atomic` has a concept of "conservative" memory
ordering, which may differ from (may be stronger than) sequentially
consistent. There are algorithms in HotSpot that are believed to rely
on that ordering.
### Uniform Initialization
The use of _uniform initialization_
([n2672](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2672.htm)),
also known as _brace initialization_, is permitted.
Some relevant sections from cppreference.com:
* [initialization](https://en.cppreference.com/w/cpp/language/initialization)
* [value initialization](https://en.cppreference.com/w/cpp/language/value_initialization)
* [direct initialization](https://en.cppreference.com/w/cpp/language/direct_initialization)
* [list initialization](https://en.cppreference.com/w/cpp/language/list_initialization)
* [aggregate initialization](https://en.cppreference.com/w/cpp/language/aggregate_initialization)
Although related, the use of `std::initializer_list` remains forbidden, as
part of the avoidance of the C++ Standard Library in HotSpot code.
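A short sketch of permitted forms (the aggregate is hypothetical):
```
#include <stddef.h>
#include <stdint.h>

// A hypothetical aggregate used for illustration.
struct MemSpan {
  uintptr_t _start;
  size_t    _word_size;
};

MemSpan span{0x1000, 64};   // aggregate (brace) initialization
int counters[4] = {};       // value-initialization: all elements zeroed
```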
### Additional Permitted Features
* `constexpr`
([n2235](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2235.pdf))
([n3652](https://isocpp.org/files/papers/N3652.html))
* Sized deallocation
([n3778](https://isocpp.org/files/papers/n3778.html))
* Variadic templates
([n2242](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2242.pdf))
([n2555](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2555.pdf))
* Static assertions
([n1720](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1720.html))
* `decltype`
([n2343](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2343.pdf))
([n3276](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3276.pdf))
* Right angle brackets
([n1757](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1757.html))
* Default template arguments for function templates
([CWG D226](http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#226))
* Template aliases
([n2258](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2258.pdf))
* Delegating constructors
([n1986](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1986.pdf))
* Explicit conversion operators
([n2437](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2437.pdf))
* Standard Layout Types
([n2342](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2342.htm))
* Defaulted and deleted functions
([n2346](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2346.htm))
* Dynamic initialization and destruction with concurrency
([n2660](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2660.htm))
* `final` virtual specifiers for classes and virtual functions
([n2928](http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2928.htm)),
([n3206](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3206.htm)),
([n3272](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3272.htm))
* `override` virtual specifiers for virtual functions
([n2928](http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2928.htm)),
([n3206](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3206.htm)),
([n3272](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3272.htm))
* Local and unnamed types as template parameters
([n2657](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2657.htm))
* Range-based `for` loops
([n2930](http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2930.html))
([range-for](https://en.cppreference.com/w/cpp/language/range-for))
### Excluded Features
* New string and character literals
* New character types
([n2249](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2249.html))
* Unicode string literals
([n2442](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm))
* Raw string literals
([n2442](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm))
* Universal character name literals
([n2170](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2170.html))
HotSpot doesn't need any of the new character and string literal
types.
* User-defined literals
([n2765](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2765.pdf)) &mdash;
User-defined literals should not be added casually, but only
through a proposal to add a specific UDL.
* Inline namespaces
([n2535](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2535.htm)) &mdash;
HotSpot makes very limited use of namespaces.
* `using namespace` directives. In particular, don't use `using
namespace std;` to avoid needing to qualify Standard Library names.
* Propagating exceptions
([n2179](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2179.html)) &mdash;
HotSpot does not permit the use of exceptions, so this feature isn't useful.
* Avoid namespace-scoped variables with non-constexpr initialization.
In particular, avoid variables with types requiring non-trivial
initialization or destruction. Initialization order problems can be
difficult to deal with and lead to surprises, as can destruction
ordering. HotSpot doesn't generally try to cleanup on exit, and
running destructors at exit can also lead to problems.
* `[[deprecated]]` attribute
([n3760](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3760.html)) &mdash;
Not relevant in HotSpot code.
* Avoid most operator overloading, preferring named functions. When
operator overloading is used, ensure the semantics conform to the
normal expected behavior of the operation.
* Avoid most implicit conversion constructors and (implicit or explicit)
conversion operators. (Note that conversion to `bool` isn't needed
in HotSpot code because of the "no implicit boolean" guideline.)
* Avoid covariant return types.
* Avoid `goto` statements.
### Undecided Features
This list is incomplete; it serves to explicitly call out some
features that have not yet been discussed.
* Trailing return type syntax for functions
([n2541](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2541.htm))
* Variable templates
([n3651](https://isocpp.org/files/papers/N3651.pdf))
* Member initializers and aggregates
([n3653](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3653.html))
* `[[noreturn]]` attribute
([n2761](http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2761.pdf))
* Rvalue references and move semantics
* Lambdas
[ADL]: https://en.cppreference.com/w/cpp/language/adl
"Argument Dependent Lookup"
[ODR]: https://en.cppreference.com/w/cpp/language/definition
"One Definition Rule"
[RAII]: https://en.cppreference.com/w/cpp/language/raii
"Resource Acquisition Is Initialization"
[RTTI]: https://en.wikipedia.org/wiki/Run-time_type_information
"Runtime Type Information"
[SFINAE]: https://en.cppreference.com/w/cpp/language/sfinae
"Substitution Failure Is Not An Error"

View File

@@ -1,223 +0,0 @@
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" lang="" xml:lang="">
<head>
<meta charset="utf-8" />
<meta name="generator" content="pandoc" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes" />
<title>Native/Unit Test Development Guidelines</title>
<style type="text/css">
code{white-space: pre-wrap;}
span.smallcaps{font-variant: small-caps;}
span.underline{text-decoration: underline;}
div.column{display: inline-block; vertical-align: top; width: 50%;}
</style>
<link rel="stylesheet" href="../make/data/docs-resources/resources/jdk-default.css" />
<!--[if lt IE 9]>
<script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.3/html5shiv-printshiv.min.js"></script>
<![endif]-->
</head>
<body>
<header id="title-block-header">
<h1 class="title">Native/Unit Test Development Guidelines</h1>
</header>
<nav id="TOC">
<ul>
<li><a href="#good-test-properties">Good test properties</a><ul>
<li><a href="#lightness">Lightness</a></li>
<li><a href="#isolation">Isolation</a></li>
<li><a href="#atomicity-and-self-containment">Atomicity and self-containment</a></li>
<li><a href="#repeatability">Repeatability</a></li>
<li><a href="#informativeness">Informativeness</a></li>
<li><a href="#testing-instead-of-visiting">Testing instead of visiting</a></li>
<li><a href="#nearness">Nearness</a></li>
</ul></li>
<li><a href="#asserts">Asserts</a><ul>
<li><a href="#several-checks">Several checks</a></li>
<li><a href="#first-parameter-is-expected-value">First parameter is expected value</a></li>
<li><a href="#floating-point-comparison">Floating-point comparison</a></li>
<li><a href="#c-string-comparison">C string comparison</a></li>
<li><a href="#error-messages">Error messages</a></li>
<li><a href="#uncluttered-output">Uncluttered output</a></li>
<li><a href="#failures-propagation">Failures propagation</a></li>
</ul></li>
<li><a href="#naming-and-grouping">Naming and Grouping</a><ul>
<li><a href="#test-group-names">Test group names</a></li>
<li><a href="#filename">Filename</a></li>
<li><a href="#file-location">File location</a></li>
<li><a href="#test-names">Test names</a></li>
<li><a href="#fixture-classes">Fixture classes</a></li>
<li><a href="#friend-classes">Friend classes</a></li>
<li><a href="#oscpu-specific-tests">OS/CPU specific tests</a></li>
</ul></li>
<li><a href="#miscellaneous">Miscellaneous</a><ul>
<li><a href="#hotspot-style">Hotspot style</a></li>
<li><a href="#codetest-metrics">Code/test metrics</a></li>
<li><a href="#access-to-non-public-members">Access to non-public members</a></li>
<li><a href="#death-tests">Death tests</a></li>
<li><a href="#external-flags">External flags</a></li>
<li><a href="#test-specific-flags">Test-specific flags</a></li>
<li><a href="#flag-restoring">Flag restoring</a></li>
<li><a href="#googletest-documentation">GoogleTest documentation</a></li>
</ul></li>
<li><a href="#todo">TODO</a></li>
</ul>
</nav>
<p>The purpose of these guidelines is to establish a shared vision of what kind of native tests we want and how we want to develop them for Hotspot using GoogleTest. Hence these guidelines include style items as well as test approach items.</p>
<p>The first section of this document describes properties of good tests, which are common to almost all types of tests regardless of language, framework, etc. Further sections provide recommendations for achieving those properties, as well as other HotSpot and/or GoogleTest specific guidelines.</p>
<h2 id="good-test-properties">Good test properties</h2>
<h3 id="lightness">Lightness</h3>
<p>Use the most lightweight type of tests.</p>
<p>In Hotspot, there are three types of tests with respect to their dependency on a JVM; each next level is slower than the previous one:</p>
<ul>
<li><p><code>TEST</code> : a test does not depend on a JVM</p></li>
<li><p><code>TEST_VM</code> : a test depends on an initialized JVM, but is expected not to break the JVM, i.e. it leaves the JVM in a workable state.</p></li>
<li><p><code>TEST_OTHER_VM</code> : a test depends on a JVM and requires a freshly initialized JVM, or leaves the JVM in a non-workable state.</p></li>
</ul>
<h3 id="isolation">Isolation</h3>
<p>Tests have to be isolated: they must not have visible side effects or influence the results of other tests.</p>
<p>The result of one test should not depend on test execution order or on other tests; otherwise it becomes almost impossible to find out why a test failed. Due to HotSpot specifics, full isolation is not easy to achieve: for example, we share an initialized JVM between all <code>TEST_VM</code> tests, so if your test changes the JVM's state too drastically and does not change it back, you had better consider <code>TEST_OTHER_VM</code>.</p>
<h3 id="atomicity-and-self-containment">Atomicity and self-containment</h3>
<p>Tests should be <em>atomic</em> and <em>self-contained</em> at the same time.</p>
<p>One test should check a particular part of a class, subsystem, functionality, etc. Then it is quite easy to determine what parts of a product are broken based on test failures. On the other hand, a test should test that part more-or-less entirely, because when one sees a test <code>FooTest::bar</code>, they assume all aspects of <code>bar</code> from <code>Foo</code> are tested.</p>
<p>However, it is impossible to cover all aspects even of a method, not to mention a subsystem. In such cases, it is recommended to have several tests, one for each aspect of the thing under test. For example, one test to check how <code>Foo::bar</code> works if an argument is <code>null</code>, another test to check how it works if an argument is acceptable but <code>Foo</code> is not in the right state to accept it, and so on. This not only helps to make tests atomic and self-contained but also makes test names self-descriptive (discussed in more detail in <a href="#test-names">Test names</a>).</p>
<h3 id="repeatability">Repeatability</h3>
<p>Tests have to be repeatable.</p>
<p>Reproducibility is crucial for a test. No one likes sporadic test failures: they are hard to investigate, fix, and verify.</p>
<p>In some cases, it is quite hard to write a 100% repeatable test, since besides the test itself there can be other moving parts, e.g. in the case of <code>TEST_VM</code> there are several concurrently running threads. Despite this, we should try to make a test as reproducible as possible.</p>
<h3 id="informativeness">Informativeness</h3>
<p>In case of a failure, a test should be as <em>informative</em> as possible.</p>
<p>Having more information about a test failure than just the compared values can be very useful for troubleshooting; it can reduce or even completely eliminate hours of debugging. This is even more important for failures that are not 100% reproducible.</p>
<p>While pursuing this property, one can easily make a test too verbose, so that it becomes really hard to find the useful information in an ocean of useless output. Hence one should think not only about how to provide <a href="#error-messages">good information</a>, but also about <a href="#uncluttered-output">when to do it</a>.</p>
<h3 id="testing-instead-of-visiting">Testing instead of visiting</h3>
<p>Tests should <em>test</em>.</p>
<p>It is not enough just to &quot;visit&quot; some code; a test should check that the code does what it is supposed to do: compare return values with expected values, check that the desired side effects happen and the undesired ones do not, and so on. In other words, a test should contain at least one GoogleTest assertion and should not rely on JVM asserts.</p>
<p>Generally speaking, to write a good test, one should create a model of the system under test and a model of possible bugs (or the bugs one wants to find), and design tests using those models.</p>
<h3 id="nearness">Nearness</h3>
<p>Prefer having checks inside test code.</p>
<p>Having test logic outside a test, e.g. in a verification method that depends on asserts in product code, not only contradicts several of the items above but also decreases test readability and stability. It is much easier to understand what a test is testing when all testing logic is located inside the test or nearby in shared test libraries. As a rule of thumb, the closer a check is to a test, the better.</p>
<h2 id="asserts">Asserts</h2>
<h3 id="several-checks">Several checks</h3>
<p>Prefer <code>EXPECT</code> over <code>ASSERT</code> if possible.</p>
<p>This is related to the <a href="#informativeness">informativeness</a> property of tests: information from the other checks can help to better localize a defect's root cause. One should use <code>ASSERT</code> if it is impossible or pointless to continue test execution after a failure. Later in the text, <code>EXPECT</code> forms are used to refer to both <code>ASSERT</code> and <code>EXPECT</code>.</p>
<p>When it is possible to make several different checks, but impossible to continue test execution if at least one check fails, you can use <code>::testing::Test::HasNonfatalFailure()</code> function. The recommended way to express that is <code>ASSERT_FALSE(::testing::Test::HasNonfatalFailure())</code>. Besides making it clear why a test is aborted, it also allows you to provide more information about a failure.</p>
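<p>A hedged sketch of that pattern (the <code>Foo</code> class and its accessors are hypothetical):</p>
<pre><code>TEST(FooTest, bar_requires_consistent_state) {
  Foo foo;
  EXPECT_TRUE(foo.is_initialized());
  EXPECT_EQ(0, foo.error_count());
  // Abort the test here if any of the EXPECTs above failed.
  ASSERT_FALSE(::testing::Test::HasNonfatalFailure())
      &lt;&lt; "Foo is in an inconsistent state, skipping the rest of the test";
  foo.bar();
}</code></pre>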
<h3 id="first-parameter-is-expected-value">First parameter is expected value</h3>
<p>In all equality assertions, expected values should be passed as the first parameter.</p>
<p>This convention is adopted by GoogleTest, and there is a slight difference in how GoogleTest treats the two parameters; the most important one is <code>null</code> detection. For various reasons, <code>null</code> detection is enabled only for the first parameter, that is to say, <code>EXPECT_EQ(NULL, object)</code> checks that object is <code>null</code>, while <code>EXPECT_EQ(object, NULL)</code> checks that object equals <code>NULL</code>; GoogleTest is very strict regarding the types of compared values, so the latter generates a compile-time error.</p>
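<p>For illustration (the <code>foo</code> object and its functions are hypothetical):</p>
<pre><code>// Expected value first; null detection works only in that position.
EXPECT_EQ(4, foo.length());
EXPECT_EQ(NULL, foo.next());   // checks that the returned pointer is null</code></pre>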
<h3 id="floating-point-comparison">Floating-point comparison</h3>
<p>Use floating-point special macros to compare <code>float/double</code> values.</p>
<p>Because of floating-point number representations and round-off errors, regular equality comparison will not return true in most cases. There are special <code>EXPECT_FLOAT_EQ/EXPECT_DOUBLE_EQ</code> assertions which check that the distance between compared values is not more than 4 ULPs, there is also <code>EXPECT_NEAR(v1, v2, eps)</code> which checks that the absolute value of the difference between <code>v1</code> and <code>v2</code> is not greater than <code>eps</code>.</p>
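<p>A short sketch (the values are chosen only for illustration):</p>
<pre><code>double d = 1.0 / 3.0;
EXPECT_DOUBLE_EQ(1.0 / 3.0, d);     // equal within 4 ULPs
EXPECT_NEAR(0.3333, d, 0.0001);     // |expected - actual| is not greater than eps</code></pre>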
<h3 id="c-string-comparison">C string comparison</h3>
<p>Use string special macros for C strings comparisons.</p>
<p><code>EXPECT_EQ</code> just compares pointer values, which is hardly what one wants when comparing C strings. GoogleTest provides <code>EXPECT_STREQ</code> and <code>EXPECT_STRNE</code> macros to compare C string contents. There are also case-insensitive versions <code>EXPECT_STRCASEEQ</code> and <code>EXPECT_STRCASENE</code>.</p>
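<p>For example (the <code>foo.name()</code> accessor is hypothetical):</p>
<pre><code>const char* name = foo.name();
EXPECT_STREQ("java.lang.Object", name);       // compares contents
EXPECT_STRCASEEQ("JAVA.LANG.OBJECT", name);   // case-insensitive comparison</code></pre>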
<h3 id="error-messages">Error messages</h3>
<p>Provide informative, but not too verbose error messages.</p>
<p>All GoogleTest asserts print the compared expressions and their values, so there is no need to repeat them in error messages. Asserts print only the compared values; they do not print any interim variables, e.g. <code>ASSERT_TRUE((val1 == val2 &amp;&amp; isFail(foo(8))) || i == 18)</code> prints only one value. If you use some complex predicates, please consider the <code>EXPECT_PRED*</code> or <code>EXPECT_PRED_FORMAT*</code> assertion families; they check that a predicate returns true/success and print out all parameter values.</p>
<p>However, in some cases the default information is not enough. A commonly used example is an assert inside a loop: GoogleTest will not print the iteration values (unless they are parameters of the assert). Other demonstrative examples are printing an error code along with the corresponding error message, or printing internal state which might have an impact on the result. One should add this information to the assert message using the <code>&lt;&lt;</code> operator.</p>
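<p>A minimal sketch (the <code>foo.process()</code> call and the loop bound are hypothetical):</p>
<pre><code>for (int i = 0; i &lt; max_iterations; i++) {
  int err = foo.process(i);
  EXPECT_EQ(0, err) &lt;&lt; "iteration " &lt;&lt; i &lt;&lt; ", error code " &lt;&lt; err;
}</code></pre>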
<h3 id="uncluttered-output">Uncluttered output</h3>
<p>Print information only if it is needed.</p>
<p>Overly verbose tests which print all information even when they pass are a bad practice: they just pollute the output, so it becomes harder to find the useful information. In order not to print information until it is really needed, one should consider saving it to a temporary buffer and passing that to an assert. <a href="https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp" class="uri">https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp</a> has a good example of how to do that.</p>
<h3 id="failures-propagation">Failures propagation</h3>
<p>Wrap a subroutine call into <code>EXPECT_NO_FATAL_FAILURE</code> macro to propagate failures.</p>
<p><code>ASSERT</code> and <code>FAIL</code> abort only the current function, so if you have them in a subroutine, a test will not be aborted after the subroutine returns even if an <code>ASSERT</code> or <code>FAIL</code> fired. You should wrap calls to such subroutines in the <code>ASSERT_NO_FATAL_FAILURE</code> macro to propagate fatal failures and abort the test. <code>(EXPECT|ASSERT)_NO_FATAL_FAILURE</code> can also be used to provide more information.</p>
<p>For obvious reasons, there are no <code>(EXPECT|ASSERT)_NO_NONFATAL_FAILURE</code> macros. However, if you need to check whether a subroutine generated a nonfatal failure (failed an <code>EXPECT</code>), you can use the <code>::testing::Test::HasNonfatalFailure</code> function, or the <code>::testing::Test::HasFailure</code> function to check whether a subroutine generated any failures; see <a href="#several-checks">Several checks</a>.</p>
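<p>A hedged sketch of the wrapping pattern (the <code>Foo</code> class and its invariants are hypothetical):</p>
<pre><code>static void check_foo_invariants(const Foo&amp; foo) {
  ASSERT_TRUE(foo.is_consistent());       // aborts only this subroutine
  ASSERT_GE(foo.capacity(), foo.size());
}

TEST(FooTest, insert_keeps_invariants) {
  Foo foo;
  foo.insert(1);
  // Propagate fatal failures from the subroutine and abort the test if any occurred.
  ASSERT_NO_FATAL_FAILURE(check_foo_invariants(foo));
}</code></pre>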
<h2 id="naming-and-grouping">Naming and Grouping</h2>
<h3 id="test-group-names">Test group names</h3>
<p>Test group names should be in CamelCase, start and end with a letter. A test group should be named after tested class, functionality, subsystem, etc.</p>
<p>This naming scheme helps to find tests and filter them, and it simplifies test failure analysis. For example: for class <code>Foo</code>, the test group is <code>Foo</code>; for the compiler logging subsystem, <code>CompilerLogging</code>; for G1 GC, <code>G1GC</code>; and so forth.</p>
<h3 id="filename">Filename</h3>
<p>A test file must have <code>test_</code> prefix and <code>.cpp</code> suffix.</p>
<p>Both are actually requirements from the current build system to recognize your tests.</p>
<h3 id="file-location">File location</h3>
<p>Test file location should reflect a location of the tested part of the product.</p>
<ul>
<li><p>All unit tests for a class from <code>foo/bar/baz.cpp</code> should be placed in <code>foo/bar/test_baz.cpp</code> under the <code>hotspot/test/native/</code> directory. Having all tests for a class in one file is a common practice for unit tests; it helps to see all existing tests at once and to share functions and/or resources without losing encapsulation.</p></li>
<li><p>For tests which test more than one class, the directory hierarchy should be the same as the product hierarchy, and the file name should reflect the name of the tested subsystem/functionality. For example, if a subsystem under test belongs to <code>gc/g1</code>, tests should be placed in the <code>gc/g1</code> directory.</p></li>
</ul>
<p>Please note that the framework prepends the directory name to a test group name. For example, if <code>TEST(foo, check_this)</code> and <code>TEST(bar, check_that)</code> are defined in the <code>hotspot/test/native/gc/shared/test_foo.cpp</code> file, they will be reported as <code>gc/shared/foo::check_this</code> and <code>gc/shared/bar::check_that</code>.</p>
<h3 id="test-names">Test names</h3>
<p>Test names should be in small_snake_case, and start and end with a letter. A test name should reflect what the test checks.</p>
<p>Such naming makes tests self-descriptive and helps a lot during the whole test life cycle. It makes it easy to do test planning and test inventory, to see what is not tested, to review tests, to analyze test failures, to evolve a test, etc. For example, <code>foo_return_0_if_name_is_null</code> is better than <code>foo_sanity</code>, <code>foo_basic</code> or just <code>foo</code>, and <code>humongous_objects_can_not_be_moved_by_young_gc</code> is better than <code>ho_young_gc</code>.</p>
<p>Using underscores is actually against the GoogleTest project convention, because it can lead to illegal identifiers; however, that rule is too strict. Restricting the use of underscores to test names only, and prohibiting test names that start or end with an underscore, is enough to be safe.</p>
<h3 id="fixture-classes">Fixture classes</h3>
<p>Fixture classes should be named after the tested classes, subsystems, etc. (following the <a href="#test-group-names">Test group names</a> rule) and have a <code>Test</code> suffix to prevent class name conflicts.</p>
<h3 id="friend-classes">Friend classes</h3>
<p>All test purpose friends should have either <code>Test</code> or <code>Testable</code> suffix.</p>
<p>It greatly simplifies understanding of the friendship's purpose and allows checking statically that private members are not exposed unexpectedly. Having <code>FooTest</code> as a friend of <code>Foo</code> without any comments will be understood as a necessary evil to achieve testability.</p>
<h3 id="oscpu-specific-tests">OS/CPU specific tests</h3>
<p>Guard OS/CPU specific tests with <code>#ifdef</code> and include the OS/CPU name in the filename.</p>
<p>For the time being, we do not support separate directories for OS-, CPU-, or OS/CPU-specific tests. If we end up having lots of such tests, we will change the directory layout and the build system to support that in the same way it is done in hotspot.</p>
<h2 id="miscellaneous">Miscellaneous</h2>
<h3 id="hotspot-style">Hotspot style</h3>
<p>Abide by the norms and rules accepted in the Hotspot style guide.</p>
<p>Tests are a part of Hotspot, so everything (where applicable) that we use for Hotspot should be used for tests as well. These guidelines cover test-specific things.</p>
<h3 id="codetest-metrics">Code/test metrics</h3>
<p>Coverage information and other code/test metrics are quite useful to decide what tests should be written, what tests should be improved and what can be removed.</p>
<p>For unit tests, a widely used and well-known coverage metric is branch coverage, which provides good test quality with a relatively easy test development process. For other levels of testing, branch coverage is not as good, and one should consider other metrics, e.g. transaction flow coverage or data flow coverage.</p>
<h3 id="access-to-non-public-members">Access to non-public members</h3>
<p>Use explicit friend class to get access to non-public members.</p>
<p>We do not use the GoogleTest macro to declare a friendship relation because, from our point of view, it is less clear than an explicit declaration.</p>
<p>Declaring a test fixture class as a friend class of the tested class is the easiest and clearest way to get access. However, it has some disadvantages; here are some of them:</p>
<ul>
<li>Each test has to be declared as a friend</li>
<li>Subclasses do not inherit the friendship relation</li>
</ul>
<p>In other words, it is harder to share code between tests. Hence, if you want to share code or expect it to be useful in other tests, you should consider making the relevant members of the tested class protected and introducing a shared test-only class which exposes those members via public functions, or even making the members publicly accessible right away in the product class. If changing member visibility is not an option, one can create a friend class which exposes the members.</p>
<h3 id="death-tests">Death tests</h3>
<p>You can not use death tests inside <code>TEST_OTHER_VM</code> and <code>TEST_VM_ASSERT*</code>.</p>
<p>We tried to make the Hotspot-GoogleTest integration as transparent as possible; however, due to the current implementation of <code>TEST_OTHER_VM</code> and <code>TEST_VM_ASSERT*</code> tests, you cannot use death test functionality in them. These tests are implemented as GoogleTest death tests, and GoogleTest does not allow a death test inside another death test.</p>
<h3 id="external-flags">External flags</h3>
<p>Passing external flags to a tested JVM is not supported.</p>
<p>The rationale for this design decision is to simplify both the tests and the test framework and to avoid failures related to incompatible flag combinations until there is a good solution for that. However, in cases when one wants to test a JVM with a specific flag combination, the <code>_JAVA_OPTIONS</code> environment variable can be used to do that. Flags from <code>_JAVA_OPTIONS</code> will be used in <code>TEST_VM</code>, <code>TEST_OTHER_VM</code> and <code>TEST_VM_ASSERT*</code> tests.</p>
<h3 id="test-specific-flags">Test-specific flags</h3>
<p>Passing flags to a tested JVM in <code>TEST_OTHER_VM</code> and <code>TEST_VM_ASSERT*</code> should be possible, but is not implemented yet.</p>
<p>A facility to pass test-specific flags is needed for system, regression, or other types of tests which require a fully initialized JVM in a particular configuration, e.g. with the Serial GC selected. There is no support for such tests now; however, there is a plan to add that in upcoming releases.</p>
<p>For now, if a test depends on flag values, it should have <code>if (!&lt;flag&gt;) { return }</code> guards at the very beginning and an <code>@requires</code> comment, similar to the jtreg <code>@requires</code> directive, right before the test macro. <a href="https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp" class="uri">https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp</a> has an example of this temporary workaround. It is important to follow that pattern, as it allows us to easily find all such tests and update them as soon as the flag-passing facility is implemented.</p>
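<p>A sketch of this temporary pattern, modelled on the referenced G1 example (the group, test name and checks are hypothetical; <code>UseG1GC</code> is just an example of a flag a test may depend on):</p>
<pre><code>// @requires UseG1GC
TEST_VM(G1Something, behaves_as_expected_with_g1) {
  if (!UseG1GC) {
    return;
  }
  // ... the actual checks, which are only valid when G1 is selected ...
}</code></pre>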
<p>In the long term, we expect jtreg to support GoogleTest tests as first-class citizens, that is to say, jtreg will parse <span class="citation" data-cites="requires">@requires</span> comments and filter out inapplicable tests.</p>
<h3 id="flag-restoring">Flag restoring</h3>
<p>Restore changed flags.</p>
<p>It is quite common for tests to configure the JVM in a certain way by changing flag values. GoogleTest provides two ways to set up the environment before a test and restore it afterward: using either a constructor and destructor or the <code>SetUp</code> and <code>TearDown</code> functions. Both ways require using a test fixture class, which sometimes is too wordy. The simpler facilities like the <code>FLAG_GUARD</code> macro or the <code>*FlagSetting</code> classes can be used in such cases to set and restore values, as sketched below.</p>
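<p>A minimal sketch of that lighter-weight approach (the flag and the checks are illustrative; see the current <code>FLAG_GUARD</code>/<code>*FlagSetting</code> declarations for the exact usage):</p>
<pre><code>TEST_VM(SomeSubsystem, respects_use_tlab) {
  FLAG_GUARD(UseTLAB);   // restores the original value when the guard goes out of scope
  UseTLAB = false;       // temporary flag change for this test; restored by the guard above
  // ... checks that rely on the changed flag ...
}</code></pre>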
<p>Caveats:</p>
<ul>
<li><p>Changing a flag's value could break the invariants between flags' values and hence could lead to an unexpected/unsupported JVM state.</p></li>
<li><p><code>FLAG_SET_*</code> macros can change more than one flag (in order to maintain invariants), so it is hard to predict which flags will be changed, which makes restoring all changed flags a nontrivial task. Thus, if a test uses <code>FLAG_SET_*</code> macros, it should use the <code>TEST_OTHER_VM</code> test type.</p></li>
</ul>
<h3 id="googletest-documentation">GoogleTest documentation</h3>
<p>In case you have any questions regarding GoogleTest itself, its asserts, test declaration macros, other macros, etc, please consult its documentation.</p>
<h2 id="todo">TODO</h2>
<p>Although this document provides guidelines on the most important parts of test development using GTest, it still misses a few items:</p>
<ul>
<li><p>Examples, esp for <a href="#access-to-non-public-members">access to non-public members</a></p></li>
<li>test types: purpose, drawbacks, limitation
<ul>
<li><code>TEST_VM</code></li>
<li><code>TEST_VM_F</code></li>
<li><code>TEST_OTHER_VM</code></li>
<li><code>TEST_VM_ASSERT</code></li>
<li><code>TEST_VM_ASSERT_MSG</code></li>
</ul></li>
<li>Miscellaneous
<ul>
<li>Test libraries
<ul>
<li>where to place</li>
<li>how to write</li>
<li>how to use</li>
</ul></li>
<li>test your tests
<ul>
<li>how to run tests in random order</li>
<li>how to run only specific tests</li>
<li>how to run each test separately</li>
<li>check that a test can find bugs it is supposed to by introducing them</li>
</ul></li>
<li>mocks/stubs/dependency injection</li>
<li>setUp/tearDown
<ul>
<li>vs c-tor/d-tor</li>
<li>empty test to test them</li>
</ul></li>
<li>internal (declared in .cpp) struct/classes</li>
</ul></li>
</ul>
</body>
</html>

View File

@@ -1,451 +0,0 @@
% Native/Unit Test Development Guidelines
The purpose of these guidelines is to establish a shared vision on
what kind of native tests and how we want to develop them for Hotspot
using GoogleTest. Hence these guidelines include style items as well
as test approach items.
First section of this document describes properties of good tests
which are common for almost all types of test regardless of language,
framework, etc. Further sections provide recommendations to achieve
those properties and other HotSpot and/or GoogleTest specific
guidelines.
## Good test properties
### Lightness
Use the most lightweight type of tests.
In Hotspot, there are 3 different types of tests with regard to their
dependency on a JVM; each next level is slower than the previous one:
* `TEST` : a test does not depend on a JVM
* `TEST_VM` : a test depends on an initialized JVM, but is
supposed not to break the JVM, i.e. to leave it in a workable state.
* `TEST_OTHER_VM` : a test depends on a JVM and requires a freshly
initialized JVM or leaves the JVM in a non-workable state
### Isolation
Tests have to be isolated: they must not have visible side effects or
influence other tests' results.
The result of one test should not depend on test execution order or on other
tests; otherwise it becomes almost impossible to find out why a
test failed. Due to Hotspot specifics, it is not so easy to get full
isolation, e.g. we share an initialized JVM between all `TEST_VM` tests,
so if your test changes the JVM's state too drastically and does not
change it back, you had better consider `TEST_OTHER_VM`.
### Atomicity and self-containment
Tests should be *atomic* and *self-contained* at the same time.
One test should check a particular part of a class, subsystem,
functionality, etc. Then it is quite easy to determine which parts of a
product are broken based on test failures. On the other hand, a test
should test that part more-or-less entirely, because when one sees a
test `FooTest::bar`, they assume all aspects of `bar` from `Foo` are tested.
However, it is impossible to cover all aspects even of a method, not
to mention a subsystem. In such cases, it is recommended to have
several tests, one for each aspect of the thing under test. For example,
one test checks how `Foo::bar` works if an argument is `null`, another
test checks how it works if an argument is acceptable but `Foo` is not
in the right state to accept it, and so on. This not only helps to make
tests atomic and self-contained but also makes test names self-descriptive
(discussed in more detail in [Test names](#test-names)).
### Repeatability
Tests have to be repeatable.
Reproducibility is crucial for a test. No one likes sporadic test
failures: they are hard to investigate, to fix, and to verify the fix for.
In some cases, it is quite hard to write a 100% repeatable test, since
besides a test there can be other moving parts, e.g. in case of
`TEST_VM` there are several concurrently running threads. Despite this,
we should try to make a test as reproducible as possible.
### Informativeness
In case of a failure, a test should be as *informative* as possible.
Having more information about a test failure than just the compared values
can be very useful for failure troubleshooting; it can reduce or even
completely eliminate debugging hours. This is even more important in
the case of failures that are not 100% reproducible.
While achieving this property, one can easily make a test too verbose, so it
will be really hard to find useful information in the ocean of useless
information. Hence one should not only think about how to provide
[good information](#error-messages), but also
[when to do it](#uncluttered-output).
### Testing instead of visiting
Tests should *test*.
It is not enough just to "visit" some code; a test should check that
the code does what it has to do: compare return values with expected
values, check that desired side effects happen and undesired ones do
not, and so on. In other words, a test should contain at least one
GoogleTest assertion and should not rely on JVM asserts.
Generally speaking, to write a good test, one should create a model of
the system under test, a model of possible bugs (or bugs which one
wants to find), and design tests using those models.
### Nearness
Prefer having checks inside test code.
Having test logic outside a test, e.g. in a verification method, or
depending on asserts in product code not only contradicts several items
above but also decreases a test's readability and stability. It is much
easier to understand what a test is testing when all testing logic is
located inside the test or nearby in shared test libraries. As a rule of
thumb, the closer a check is to a test, the better.
## Asserts
### Several checks
Prefer `EXPECT` over `ASSERT` if possible.
This is related to the [informativeness](#informativeness) property of
tests; information from other checks can help to better localize a
defect's root cause. One should use `ASSERT` if it is impossible to
continue test execution or if continuing does not make much sense. Later in
the text, `EXPECT` forms will be used to refer to both
`ASSERT` and `EXPECT`.
When it is possible to make several different checks, but impossible
to continue test execution if at least one check fails, you can
use `::testing::Test::HasNonfatalFailure()` function. The recommended
way to express that is
`ASSERT_FALSE(::testing::Test::HasNonfatalFailure())`. Besides making it
clear why a test is aborted, it also allows you to provide more
information about a failure.
### First parameter is expected value
In all equality assertions, expected values should be passed as the
first parameter.
This convention is adopted by GoogleTest, and there is a slight
difference in how GoogleTest treats the parameters; the most important one
is `null` detection. For various reasons, `null` detection is enabled
only for the first parameter, that is to say `EXPECT_EQ(NULL, object)`
checks that object is `null`, while `EXPECT_EQ(object, NULL)` checks that
object equals `NULL`; GoogleTest is very strict regarding the types of
compared values, so the latter generates a compile-time error.
### Floating-point comparison
Use floating-point special macros to compare `float/double` values.
Because of floating-point number representations and round-off errors,
regular equality comparison will not return true in most cases. There
are special `EXPECT_FLOAT_EQ/EXPECT_DOUBLE_EQ` assertions which check
that the distance between compared values is not more than 4 ULPs,
there is also `EXPECT_NEAR(v1, v2, eps)` which checks that the absolute
value of the difference between `v1` and `v2` is not greater than `eps`.
### C string comparison
Use string special macros for C strings comparisons.
`EXPECT_EQ` just compares pointers values, which is hardly what one
wants comparing C strings. GoogleTest provides `EXPECT_STREQ` and
`EXPECT_STRNE` macros to compare C string contents. There are also
case-insensitive versions `EXPECT_STRCASEEQ`, `EXPECT_STRCASENE`.
### Error messages
Provide informative, but not too verbose error messages.
All GoogleTest asserts print the compared expressions and their values, so
there is no need to repeat them in error messages. Asserts print only the
compared values; they do not print any interim variables, e.g.
`ASSERT_TRUE((val1 == val2 && isFail(foo(8)) || i == 18)` prints only
one value. If you use some complex predicates, please consider the
`EXPECT_PRED*` or `EXPECT_FORMAT_PRED` assertion families; they check that
a predicate returns true/success and print out all parameter values.
However, in some cases the default information is not enough. A common
example is an assert inside a loop: GoogleTest will not print the
iteration values (unless one is an assert's parameter). Other
demonstrative examples are printing an error code and the corresponding
error message, or printing internal state which might have an impact on
the results. One should add this information to the assert message using
the `<<` operator.
### Uncluttered output
Print information only if it is needed.
Overly verbose tests which print all information even if they pass are
very bad practice. They just pollute the output, so it becomes harder to
find useful information. In order not to print information until it is
really needed, one should consider saving it to a temporary buffer and
passing it to an assert.
<https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/shared/test_memset_with_concurrent_readers.cpp>
has a good example of how to do that.
### Failures propagation
Wrap a subroutine call into `EXPECT_NO_FATAL_FAILURE` macro to
propagate failures.
`ASSERT` and `FAIL` abort only the current function, so if you have them
in a subroutine, a test will not be aborted after the subroutine even
if `ASSERT` or `FAIL` fails. You should call such subroutines in
`ASSERT_NO_FATAL_FAILURE` macro to propagate fatal failures and abort a
test. `(EXPECT|ASSERT)_NO_FATAL_FAILURE` can also be used to provide
more information.
Due to obvious reasons, there are no
`(EXPECT|ASSERT)_NO_NONFATAL_FAILURE` macros. However, if you need to
check if a subroutine generated a nonfatal failure (failed an `EXPECT`),
you can use `::testing::Test::HasNonfatalFailure` function,
or `::testing::Test::HasFailure` function to check if a subroutine
generated any failures, see [Several checks](#several-checks).
## Naming and Grouping
### Test group names
Test group names should be in CamelCase, start and end with a letter.
A test group should be named after tested class, functionality,
subsystem, etc.
This naming scheme helps to find tests, filter them and simplifies
test failure analysis. For example, class `Foo` - test group `Foo`,
compiler logging subsystem - test group `CompilerLogging`, G1 GC — test
group `G1GC`, and so forth.
### Filename
A test file must have `test_` prefix and `.cpp` suffix.
Both are actually requirements from the current build system to
recognize your tests.
### File location
Test file location should reflect a location of the tested part of the product.
* All unit tests for a class from `foo/bar/baz.cpp` should be placed
in `foo/bar/test_baz.cpp` in the `hotspot/test/native/` directory. Having all
tests for a class in one file is a common practice for unit tests; it
helps to see all existing tests at once and share functions and/or
resources without losing encapsulation.
* For tests which test more than one class, directory hierarchy should
be the same as product hierarchy, and file name should reflect the
name of the tested subsystem/functionality. For example, if a
sub-system under tests belongs to `gc/g1`, tests should be placed in
`gc/g1` directory.
Please note that the framework prepends the directory name to the test group
name. For example, if `TEST(foo, check_this)` and `TEST(bar, check_that)`
are defined in `hotspot/test/native/gc/shared/test_foo.cpp` file, they
will be reported as `gc/shared/foo::check_this` and
`gc/shared/bar::check_that`.
### Test names
Test names should be in small_snake_case, and start and end with a letter.
A test name should reflect what the test checks.
Such naming makes tests self-descriptive and helps a lot during the
whole test life cycle. It is easy to do test planning, test inventory,
to see what things are not tested, to review tests, to analyze test
failures, to evolve a test, etc. For example
`foo_return_0_if_name_is_null` is better than `foo_sanity` or `foo_basic` or
just `foo`, `humongous_objects_can_not_be_moved_by_young_gc` is better
than `ho_young_gc`.
Using underscores is actually against the GoogleTest project convention,
because it can lead to illegal identifiers; however, that rule is too
strict for our purposes. Restricting underscores to test names only and
prohibiting test names that start or end with an underscore is enough to
stay safe.
### Fixture classes
Fixture classes should be named after tested classes, subsystems, etc
(follow [Test group names rule](#test-group-names)) and have
`Test` suffix to prevent class name conflicts.
### Friend classes
All test purpose friends should have either `Test` or `Testable` suffix.
It greatly simplifies understanding of the friendship's purpose and allows
statically checking that private members are not exposed unexpectedly.
Having `FooTest` as a friend of `Foo` without any comments will be
understood as a necessary evil to get testability.
### OS/CPU specific tests
Guard OS/CPU specific tests by `#ifdef` and have OS/CPU name in filename.
For the time being, we do not support separate directories for OS-,
CPU-, or OS-CPU-specific tests; if we end up with many such tests,
we will change the directory layout and build system to support that in
the same way it is done in hotspot.
## Miscellaneous
### Hotspot style
Abide by the norms and rules accepted in the Hotspot style guide.
Tests are a part of Hotspot, so everything (where applicable) we use for
Hotspot should be used for tests as well. These guidelines cover
test-specific things.
### Code/test metrics
Coverage information and other code/test metrics are quite useful to
decide what tests should be written, what tests should be improved and
what can be removed.
For unit tests, a widely used and well-known coverage metric is branch
coverage, which provides good test quality with a relatively easy
test development process. For other levels of testing, branch coverage
is not as good, and one should consider other metrics, e.g.
transaction flow coverage or data flow coverage.
### Access to non-public members
Use explicit friend class to get access to non-public members.
We do not use the GoogleTest macro to declare friendship relations
because, from our point of view, it is less clear than an explicit
declaration.
Declaring a test fixture class as a friend class of the tested class is
the easiest and the clearest way to get access. However, it has some
disadvantages; here are some of them:
* Each test has to be declared as a friend
* Subclasses do not inherit the friendship relation
In other words, it is harder to share code between tests. Hence, if you
want to share code or expect it to be useful in other tests, you
should consider making members in the tested class protected and
introducing a shared test-only class which exposes those members via
public functions, or even making the members publicly accessible right
away in the product class. If changing member visibility is not an
option, one can create a friend class which exposes the members.
### Death tests
You can not use death tests inside `TEST_OTHER_VM` and `TEST_VM_ASSERT*`.
We tried to make the Hotspot-GoogleTest integration as transparent as
possible; however, due to the current implementation of `TEST_OTHER_VM`
and `TEST_VM_ASSERT*` tests, you cannot use death test functionality in
them. These tests are implemented as GoogleTest death tests, and
GoogleTest does not allow a death test inside another death
test.
### External flags
Passing external flags to a tested JVM is not supported.
The rationale for this design decision is to simplify both the tests and
the test framework and to avoid failures related to incompatible flag
combinations until there is a good solution for that. However, in
cases when one wants to test a JVM with a specific flag combination, the
`_JAVA_OPTIONS` environment variable can be used to do that. Flags from
`_JAVA_OPTIONS` will be used in `TEST_VM`, `TEST_OTHER_VM` and
`TEST_VM_ASSERT*` tests.
### Test-specific flags
Passing flags to a tested JVM in `TEST_OTHER_VM` and `TEST_VM_ASSERT*`
should be possible, but is not implemented yet.
A facility to pass test-specific flags is needed for system, regression,
or other types of tests which require a fully initialized JVM in a
particular configuration, e.g. with the Serial GC selected. There is no
support for such tests now; however, there is a plan to add that in
upcoming releases.
For now, if a test depends on flag values, it should have `if
(!<flag>) { return }` guards at the very beginning and an `@requires`
comment, similar to the jtreg `@requires` directive, right before the test macro.
<https://hg.openjdk.java.net/jdk/jdk/file/tip/test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp>
has an example of this temporary workaround. It is important to follow
that pattern, as it allows us to easily find all such tests and update
them as soon as the flag-passing facility is implemented.
In the long term, we expect jtreg to support GoogleTest tests as first-class
citizens, that is to say, jtreg will parse @requires comments
and filter out inapplicable tests.
### Flag restoring
Restore changed flags.
It is quite common for tests to configure the JVM in a certain way by
changing flag values. GoogleTest provides two ways to set up the
environment before a test and restore it afterward: using either a
constructor and destructor or the `SetUp` and `TearDown` functions. Both ways
require using a test fixture class, which sometimes is too wordy. The
simpler facilities like the `FLAG_GUARD` macro or the `*FlagSetting` classes can
be used in such cases to set and restore values.
Caveats:
* Changing a flag's value could break the invariants between flags' values and hence could lead to an unexpected/unsupported JVM state.
* `FLAG_SET_*` macros can change more than one flag (in order to
maintain invariants) so it is hard to predict what flags will be
changed and it makes restoring all changed flags a nontrivial task.
Thus, if a test uses `FLAG_SET_*` macros, it should use the `TEST_OTHER_VM`
test type.
### GoogleTest documentation
In case you have any questions regarding GoogleTest itself, its
asserts, test declaration macros, other macros, etc, please consult
its documentation.
## TODO
Although this document provides guidelines on the most important parts
of test development using GTest, it still misses a few items:
* Examples, esp for [access to non-public members](#access-to-non-public-members)
* test types: purpose, drawbacks, limitation
* `TEST_VM`
* `TEST_VM_F`
* `TEST_OTHER_VM`
* `TEST_VM_ASSERT`
* `TEST_VM_ASSERT_MSG`
* Miscellaneous
* Test libraries
* where to place
* how to write
* how to use
* test your tests
* how to run tests in random order
* how to run only specific tests
* how to run each test separately
* check that a test can find bugs it is supposed to by introducing them
* mocks/stubs/dependency injection
* setUp/tearDown
* vs c-tor/d-tor
* empty test to test them
* internal (declared in .cpp) struct/classes

View File

@@ -0,0 +1,10 @@
# jetbrains/runtime:jbr15env
FROM centos:7
RUN yum -y install centos-release-scl
RUN yum -y install devtoolset-8
RUN yum -y install zip bzip2 unzip tar wget make autoconf automake libtool alsa-devel cups-devel xorg-x11-devel libjpeg62-devel giflib-devel freetype-devel file which libXtst-devel libXt-devel libXrender-devel alsa-lib-devel fontconfig-devel libXrandr-devel libXi-devel git
# Install Java 14 (used as the boot JDK)
RUN wget https://download.java.net/java/GA/jdk14.0.1/664493ef4a6946b186ff29eb326336a2/7/GPL/openjdk-14.0.1_linux-x64_bin.tar.gz \
-O - | tar xz -C /
ENV JAVA_HOME /jbrsdk
ENV PATH $JAVA_HOME/bin:/opt/rh/devtoolset-8/root/usr/bin:$PATH

View File

@@ -0,0 +1,9 @@
#!/bin/bash -x
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
script_dir=jb/project/tools/linux/scripts
${script_dir}/mkimages_x64.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jcef" || exit $?
${script_dir}/mkimages_x64.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jfx" || exit $?
${script_dir}/mkimages_x64.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jfx_jcef" || exit $?

View File

@@ -0,0 +1,77 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - the bundle 1) jbr with javafx only will be created
#
# jbrsdk-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
JBRSDK_BASE_NAME=jbrsdk-${JBSDK_VERSION}
sh configure \
--disable-warnings-as-errors \
--with-debug-level=release \
--with-version-build=$JDK_BUILD_NUMBER \
--with-version-pre= \
--with-version-opt=b$build_number \
--with-boot-jdk=amazon-corretto-11.0.5.10.1-linux-aarch64 \
--with-import-modules=./modular-sdk \
--enable-cds=yes || exit $?
make clean CONF=linux-aarch64-normal-server-release || exit $?
make images CONF=linux-aarch64-normal-server-release test-image || exit $?
JBSDK=${JBRSDK_BASE_NAME}-linux-aarch64-b${build_number}
BASE_DIR=build/linux-aarch64-normal-server-release/images
JSDK=${BASE_DIR}/jdk
JBRSDK_BUNDLE=jbrsdk
echo Fixing permissions
chmod -R a+r $JSDK
rm -rf $BASE_DIR/$JBRSDK_BUNDLE
cp -r $JSDK $BASE_DIR/$JBRSDK_BUNDLE || exit $?
echo Creating $JBSDK.tar.gz ...
tar -pcf $JBSDK.tar \
--exclude=*.debuginfo --exclude=demo --exclude=sample --exclude=man \
-C $BASE_DIR ${JBRSDK_BUNDLE} || exit $?
gzip $JBSDK.tar || exit $?
JBR_BUNDLE=jbr
JBR_BASE_NAME=jbr-$JBSDK_VERSION
rm -rf $BASE_DIR/$JBR_BUNDLE
JBR=$JBR_BASE_NAME-linux-aarch64-b$build_number
grep -v javafx modules.list | grep -v "jdk.internal.vm\|jdk.aot\|jcef" > modules.list.aarch64
echo Running jlink....
${JSDK}/bin/jlink \
--module-path ${JSDK}/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules.list.aarch64 | sed s/" "//g | sed s/,$//g) \
--output ${BASE_DIR}/${JBR_BUNDLE} || exit $?
echo Modifying release info ...
grep -v \"^JAVA_VERSION\" ${JSDK}/release | grep -v \"^MODULES\" >> ${BASE_DIR}/${JBR_BUNDLE}/release
echo Creating $JBR.tar.gz ...
tar -pcf $JBR.tar -C $BASE_DIR ${JBR_BUNDLE} || exit $?
gzip $JBR.tar || exit $?
JBRSDK_TEST=$JBRSDK_BASE_NAME-linux-test-aarch64-b$build_number
echo Creating $JBRSDK_TEST.tar.gz ...
tar -pcf $JBRSDK_TEST.tar -C $BASE_DIR --exclude='test/jdk/demos' test || exit $?
gzip $JBRSDK_TEST.tar || exit $?

View File

@@ -0,0 +1,139 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - the bundle 1) jbr with javafx only will be created
#
# jbrsdk-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
bundle_type=$4
function create_jbr {
case "$1" in
"${bundle_type}_lw")
JBR_BASE_NAME=jbr_${bundle_type}_lw-${JBSDK_VERSION}
grep -v "jdk.compiler\|jdk.hotspot.agent" modules.list > modules_tmp.list
;;
"jfx" | "jcef")
JBR_BASE_NAME=jbr_${bundle_type}-${JBSDK_VERSION}
cat modules.list > modules_tmp.list
;;
"jfx_jcef")
JBR_BASE_NAME=jbr-${JBSDK_VERSION}
cat modules.list > modules_tmp.list
;;
*)
JBR_BASE_NAME=jbr-${JBSDK_VERSION}
cat modules.list > modules_tmp.list
;;
esac
rm -rf ${BASE_DIR}/${JBR_BUNDLE}
JBR=$JBR_BASE_NAME-linux-x64-b$build_number
echo Running jlink....
$JSDK/bin/jlink \
--module-path $JSDK/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules_tmp.list | sed s/" "//g) --output $BASE_DIR/$JBR_BUNDLE
if [[ "$bundle_type" == *jcef* ]]; then
cp -R $BASE_DIR/$JBR_BUNDLE $BASE_DIR/jbr
cp -R jcef_linux_x64/* $BASE_DIR/$JBR_BUNDLE/lib || exit $?
fi
grep -v "^JAVA_VERSION" $JSDK/release | grep -v "^MODULES" >> $BASE_DIR/$JBR_BUNDLE/release
echo Creating $JBR.tar.gz ...
if [ ! -z "$bundle_type" ]; then
rm -rf ${BASE_DIR}/jbr
cp -R ${BASE_DIR}/${JBR_BUNDLE} ${BASE_DIR}/jbr
fi
tar -pcf $JBR.tar -C $BASE_DIR jbr || exit $?
gzip $JBR.tar || exit $?
rm -rf ${BASE_DIR}/${JBR_BUNDLE}
}
JBRSDK_BASE_NAME=jbrsdk-$JBSDK_VERSION
#git checkout -- modules.list src
case "$bundle_type" in
"jfx")
git apply -p0 < jb/project/tools/exclude_jcef_module.patch
;;
"jcef")
git apply -p0 < jb/project/tools/exclude_jfx_module.patch
;;
esac
if [ -z "$bundle_type" ]; then
JBR_BUNDLE=jbr
sh configure \
--disable-warnings-as-errors \
--with-debug-level=release \
--with-version-pre= \
--with-version-build=${JDK_BUILD_NUMBER} \
--with-version-opt=b${build_number} \
--with-boot-jdk=$BOOT_JDK \
--enable-cds=yes || exit $?
else
JBR_BUNDLE=jbr_${bundle_type}
sh configure \
--disable-warnings-as-errors \
--with-debug-level=release \
--with-version-pre= \
--with-version-build=${JDK_BUILD_NUMBER} \
--with-version-opt=b${build_number} \
--with-import-modules=./modular-sdk \
--with-boot-jdk=$BOOT_JDK \
--enable-cds=yes || exit $?
fi
make images CONF=linux-x86_64-server-release || exit $?
JSDK=build/linux-x86_64-server-release/images/jdk
JBSDK=$JBRSDK_BASE_NAME-linux-x64-b$build_number
echo Fixing permissions
chmod -R a+r $JSDK
BASE_DIR=build/linux-x86_64-server-release/images
JBRSDK_BUNDLE=jbrsdk
rm -rf $BASE_DIR/$JBRSDK_BUNDLE
cp -r $JSDK $BASE_DIR/$JBRSDK_BUNDLE || exit $?
if [[ "$bundle_type" == *jcef* ]]; then
cp -R jcef_linux_x64/* $BASE_DIR/$JBRSDK_BUNDLE/lib || exit $?
fi
if [[ "$bundle_type" == "jfx_jcef" || -z "$bundle_type" ]]; then
echo Creating $JBSDK.tar.gz ...
tar -pcf $JBSDK.tar --exclude=*.debuginfo --exclude=demo --exclude=sample --exclude=man \
-C $BASE_DIR $JBRSDK_BUNDLE || exit $?
gzip $JBSDK.tar || exit $?
fi
create_jbr ${bundle_type}
if [[ "$bundle_type" == "jfx_jcef" || -z "$bundle_type" ]]; then
make test-image || exit $?
JBRSDK_TEST=$JBRSDK_BASE_NAME-linux-test-x64-b$build_number
echo Creating $JBRSDK_TEST.tar.gz ...
tar -pcf $JBRSDK_TEST.tar -C $BASE_DIR --exclude='test/jdk/demos' test || exit $?
gzip $JBRSDK_TEST.tar || exit $?
fi

View File

@@ -0,0 +1,72 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - the bundle 1) jbr with javafx only will be created
#
# jbrsdk-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
JBRSDK_BASE_NAME=jbrsdk-${JBSDK_VERSION}
sh configure \
--disable-warnings-as-errors \
--with-debug-level=fastdebug \
--with-version-build=$JDK_BUILD_NUMBER \
--with-version-pre= \
--with-version-opt=b$build_number \
--with-import-modules=./modular-sdk \
--enable-cds=yes || exit $?
make clean CONF=linux-x86_64-normal-server-fastdebug || exit $?
make images CONF=linux-x86_64-normal-server-fastdebug || exit $?
JBSDK=${JBRSDK_BASE_NAME}-linux-x64-fastdebug-b${build_number}
BASE_DIR=build/linux-x86_64-normal-server-fastdebug/images
JSDK=${BASE_DIR}/jdk
JBRSDK_BUNDLE=jbrsdk
echo Fixing permissions
chmod -R a+r $JSDK
rm -rf $BASE_DIR/$JBRSDK_BUNDLE
cp -r $JSDK $BASE_DIR/$JBRSDK_BUNDLE || exit $?
cp -R jcef_linux_x64/* $BASE_DIR/$JBRSDK_BUNDLE/lib || exit $?
echo Creating $JBSDK.tar.gz ...
tar -pcf $JBSDK.tar \
--exclude=*.debuginfo --exclude=demo --exclude=sample --exclude=man \
-C $BASE_DIR ${JBRSDK_BUNDLE} || exit $?
gzip $JBSDK.tar || exit $?
JBR_BUNDLE=jbr
JBR_BASE_NAME=jbr-$JBSDK_VERSION
rm -rf $BASE_DIR/$JBR_BUNDLE
JBR=$JBR_BASE_NAME-linux-x64-fastdebug-b$build_number
echo Running jlink....
${JSDK}/bin/jlink \
--module-path ${JSDK}/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules.list | sed s/" "//g | sed s/,$//g) \
--output ${BASE_DIR}/${JBR_BUNDLE} || exit $?
cp -R jcef_linux_x64/* $BASE_DIR/$JBR_BUNDLE/lib || exit $?
echo Modifying release info ...
grep -v \"^JAVA_VERSION\" ${JSDK}/release | grep -v \"^MODULES\" >> ${BASE_DIR}/${JBR_BUNDLE}/release
echo Creating $JBR.tar.gz ...
tar -pcf $JBR.tar -C $BASE_DIR ${JBR_BUNDLE} || exit $?
gzip $JBR.tar || exit $?

View File

@@ -0,0 +1,73 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - the bundle 1) jbr with javafx only will be created
#
# jbrsdk-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
JBRSDK_BASE_NAME=jbrsdk-${JBSDK_VERSION}
linux32 bash configure \
--disable-warnings-as-errors \
--with-debug-level=release \
--with-version-build=$JDK_BUILD_NUMBER \
--with-version-pre= \
--with-version-opt=b$build_number \
--with-boot-jdk=/jbrsdk-11.0.5-b1 \
--enable-cds=yes || exit $?
make clean CONF=linux-x86-normal-server-release || exit $?
make images CONF=linux-x86-normal-server-release test-image || exit $?
JBSDK=${JBRSDK_BASE_NAME}-linux-x86-b${build_number}
BASE_DIR=build/linux-x86-normal-server-release/images
JSDK=${BASE_DIR}/jdk
JBRSDK_BUNDLE=jbrsdk
echo Fixing permissions
chmod -R a+r $JSDK
rm -rf $BASE_DIR/$JBRSDK_BUNDLE
cp -r $JSDK $BASE_DIR/$JBRSDK_BUNDLE || exit $?
echo Creating $JBSDK.tar.gz ...
tar -pcf $JBSDK.tar --exclude=*.debuginfo --exclude=demo --exclude=sample --exclude=man -C $BASE_DIR ${JBRSDK_BUNDLE} || exit $?
gzip $JBSDK.tar || exit $?
JBR_BUNDLE=jbr
JBR_BASE_NAME=jbr-$JBSDK_VERSION
rm -rf $BASE_DIR/$JBR_BUNDLE
JBR=$JBR_BASE_NAME-linux-x86-b$build_number
grep -v javafx modules.list | grep -v "jdk.internal.vm\|jdk.aot\|jcef" > modules.list.x86
echo Running jlink....
${JSDK}/bin/jlink \
--module-path ${JSDK}/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules.list.x86 | sed s/" "//g | sed s/,$//g) --output ${BASE_DIR}/${JBR_BUNDLE} || exit $?
echo Modifying release info ...
grep -v \"^JAVA_VERSION\" ${JSDK}/release | grep -v \"^MODULES\" >> ${BASE_DIR}/${JBR_BUNDLE}/release
echo Creating $JBR.tar.gz ...
tar -pcf $JBR.tar -C $BASE_DIR $JBR_BUNDLE || exit $?
gzip $JBR.tar || exit $?
JBRSDK_TEST=$JBRSDK_BASE_NAME-linux-test-x86-b$build_number
echo Creating $JBRSDK_TEST.tar.gz ...
tar -pcf $JBRSDK_TEST.tar -C $BASE_DIR --exclude='test/jdk/demos' --exclude='test/hotspot/gtest' test || exit $?
gzip $JBRSDK_TEST.tar || exit $?

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<key>com.apple.security.cs.disable-executable-page-protection</key>
<true/>
</dict>
</plist>

View File

@@ -0,0 +1,9 @@
#!/bin/bash -x
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
script_dir=jb/project/tools/mac/scripts
${script_dir}/mkimages.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jcef" || exit $?
${script_dir}/mkimages.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jfx" || exit $?
${script_dir}/mkimages.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jfx_jcef" || exit $?

View File

@@ -0,0 +1,148 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - the bundle 1) jbr with javafx only will be created
#
# jbrsdk-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
bundle_type=$4
function create_jbr {
case "$1" in
"${bundle_type}_lw")
JBR_BASE_NAME=jbr_${bundle_type}_lw-${JBSDK_VERSION}
grep -v "jdk.compiler\|jdk.hotspot.agent" modules.list > modules_tmp.list
;;
"jfx" | "jcef")
JBR_BASE_NAME=jbr_${bundle_type}-${JBSDK_VERSION}
cat modules.list > modules_tmp.list
;;
"jfx_jcef")
JBR_BASE_NAME=jbr-${JBSDK_VERSION}
cat modules.list > modules_tmp.list
;;
*)
JBR_BASE_NAME=jbr-${JBSDK_VERSION}
cat modules.list > modules_tmp.list
;;
esac
rm -rf ${BASE_DIR}/${JBR_BUNDLE}
JRE_CONTENTS=${BASE_DIR}/${JBR_BUNDLE}/Contents
JRE_HOME=${JRE_CONTENTS}/Home
if [ -d "${JRE_CONTENTS}" ]; then
rm -rf ${JRE_CONTENTS}
fi
mkdir -p ${JRE_CONTENTS}
JBR=${JBR_BASE_NAME}-osx-x64-b${build_number}
${BASE_DIR}/$JBRSDK_BUNDLE/Contents/Home/bin/jlink \
--module-path ${BASE_DIR}/${JBRSDK_BUNDLE}/Contents/Home/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules_tmp.list | sed s/" "//g) --output ${JRE_HOME} || exit $?
grep -v "^JAVA_VERSION" ${BASE_DIR}/${JBRSDK_BUNDLE}/Contents/Home/release | grep -v "^MODULES" >> ${JRE_HOME}/release
cp -R ${BASE_DIR}/${JBRSDK_BUNDLE}/Contents/MacOS ${JRE_CONTENTS}
cp ${BASE_DIR}/${JBRSDK_BUNDLE}/Contents/Info.plist ${JRE_CONTENTS}
if [[ "${bundle_type}" == *jcef* ]]; then
rm -rf ${JRE_CONTENTS}/Frameworks || exit $?
rm -rf ${JRE_CONTENTS}/Helpers || exit $?
cp -a jcef_mac/Frameworks ${JRE_CONTENTS} || exit $?
cp -a jcef_mac/Helpers ${JRE_CONTENTS} || exit $?
fi
echo Creating ${JBR}.tar.gz ...
if [ ! -z "$bundle_type" ]; then
rm -rf ${BASE_DIR}/jbr
cp -R ${BASE_DIR}/${JBR_BUNDLE} ${BASE_DIR}/jbr
fi
COPYFILE_DISABLE=1 tar -pczf ${JBR}.tar.gz --exclude='*.dSYM' --exclude='man' -C ${BASE_DIR} jbr || exit $?
rm -rf ${BASE_DIR}/${JBR_BUNDLE}
}
JBRSDK_BASE_NAME=jbrsdk-${JBSDK_VERSION}
#git checkout -- modules.list src
case "$bundle_type" in
"jfx")
git apply -p0 < jb/project/tools/exclude_jcef_module.patch
;;
"jcef")
git apply -p0 < jb/project/tools/exclude_jfx_module.patch
;;
esac
if [ -z "$bundle_type" ]; then
JBR_BUNDLE=jbr
sh configure \
--disable-warnings-as-errors \
--with-debug-level=release \
--with-version-pre= \
--with-version-build=${JDK_BUILD_NUMBER} \
--with-version-opt=b${build_number} \
--with-boot-jdk=`/usr/libexec/java_home -v $BOOT_JDK` \
--enable-cds=yes || exit $?
else
JBR_BUNDLE=jbr_${bundle_type}
sh configure \
--disable-warnings-as-errors \
--with-debug-level=release \
--with-version-pre= \
--with-version-build=${JDK_BUILD_NUMBER} \
--with-version-opt=b${build_number} \
--with-import-modules=./modular-sdk \
--with-boot-jdk=`/usr/libexec/java_home -v $BOOT_JDK` \
--enable-cds=yes || exit $?
fi
make images CONF=macosx-x86_64-server-release || exit $?
JSDK=build/macosx-x86_64-server-release/images/jdk-bundle
JBSDK=${JBRSDK_BASE_NAME}-osx-x64-b${build_number}
BASE_DIR=jre
JBRSDK_BUNDLE=jbrsdk
rm -rf $BASE_DIR
mkdir $BASE_DIR || exit $?
JBSDK_VERSION_WITH_DOTS=$(echo $JBSDK_VERSION | sed 's/_/\./g')
cp -a $JSDK/jdk-$JBSDK_VERSION_WITH_DOTS.jdk $BASE_DIR/$JBRSDK_BUNDLE || exit $?
if [[ "$bundle_type" == *jcef* ]]; then
cp -a jcef_mac/Frameworks $BASE_DIR/$JBRSDK_BUNDLE/Contents/ || exit $?
cp -a jcef_mac/Helpers $BASE_DIR/$JBRSDK_BUNDLE/Contents/ || exit $?
fi
if [[ "$bundle_type" == "jfx_jcef" || -z "$bundle_type" ]]; then
echo Creating $JBSDK.tar.gz ...
COPYFILE_DISABLE=1 tar -pczf $JBSDK.tar.gz -C $BASE_DIR \
--exclude='._*' --exclude='.DS_Store' --exclude='*~' \
--exclude='Home/demo' --exclude='Home/man' --exclude='Home/sample' \
$JBRSDK_BUNDLE || exit $?
fi
create_jbr "${bundle_type}" || exit $?
if [[ "$bundle_type" == "jfx_jcef" || -z "$bundle_type" ]]; then
make test-image || exit $?
JBRSDK_TEST=$JBRSDK_BASE_NAME-osx-test-x64-b$build_number
echo Creating $JBRSDK_TEST.tar.gz ...
COPYFILE_DISABLE=1 tar -pczf $JBRSDK_TEST.tar.gz -C build/macosx-x86_64-server-release/images \
--exclude='test/jdk/demos' test || exit $?
fi

View File

@@ -0,0 +1,83 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - the bundle 1) jbr with javafx only will be created
#
# jbrsdk-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
JBRSDK_BASE_NAME=jbrsdk-${JBSDK_VERSION}
sh configure \
--disable-warnings-as-errors \
--with-debug-level=fastdebug \
--with-version-build=$JDK_BUILD_NUMBER \
--with-version-pre= \
--with-version-opt=b$build_number \
--with-import-modules=./modular-sdk \
--with-boot-jdk=`/usr/libexec/java_home -v 11` \
--enable-cds=yes || exit $?
make clean CONF=macosx-x86_64-normal-server-fastdebug || exit $?
make images CONF=macosx-x86_64-normal-server-fastdebug || exit $?
JSDK=build/macosx-x86_64-normal-server-fastdebug/images/jdk-bundle
JBSDK=${JBRSDK_BASE_NAME}-osx-x64-fastdebug-b${build_number}
BASE_DIR=jre
JBRSDK_BUNDLE=jbrsdk
rm -rf $BASE_DIR
mkdir $BASE_DIR || exit $?
JBSDK_VERSION_WITH_DOTS=$(echo $JBSDK_VERSION | sed 's/_/\./g')
cp -a $JSDK/jdk-$JBSDK_VERSION_WITH_DOTS.jdk $BASE_DIR/$JBRSDK_BUNDLE || exit $?
echo Creating $JBSDK.tar.gz ...
cp -a jcef_mac/Frameworks $BASE_DIR/$JBRSDK_BUNDLE/Contents/
cp -a jcef_mac/Helpers $BASE_DIR/$JBRSDK_BUNDLE/Contents
COPYFILE_DISABLE=1 \
tar -pczf ${JBSDK}.tar.gz -C ${BASE_DIR} \
--exclude='._*' --exclude='.DS_Store' --exclude='*~' \
--exclude='Home/demo' --exclude='Home/man' --exclude='Home/sample' \
${JBRSDK_BUNDLE} || exit $?
JBR_BUNDLE=jbr
JRE_CONTENTS=$BASE_DIR/$JBR_BUNDLE/Contents
JRE_HOME=$JRE_CONTENTS/Home
JBR_BASE_NAME=jbr-$JBSDK_VERSION
mkdir -p $JRE_CONTENTS
if [ -d "$JRE_HOME" ]; then
rm -rf $JRE_HOME
fi
JBR=${JBR_BASE_NAME}-osx-x64-fastdebug-b${build_number}
$BASE_DIR/$JBRSDK_BUNDLE/Contents/Home/bin/jlink \
--module-path $BASE_DIR/$JBRSDK_BUNDLE/Contents/Home/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules.list | sed s/" "//g) --output $JRE_HOME || exit $?
grep -v "^JAVA_VERSION" $BASE_DIR/$JBRSDK_BUNDLE/Contents/Home/release | grep -v "^MODULES" >> $JRE_HOME/release
cp -R $BASE_DIR/$JBRSDK_BUNDLE/Contents/MacOS $JRE_CONTENTS
cp $BASE_DIR/$JBRSDK_BUNDLE/Contents/Info.plist $JRE_CONTENTS
cp -a jcef_mac/Frameworks ${JRE_CONTENTS} || exit $?
cp -a jcef_mac/Helpers ${JRE_CONTENTS} || exit $?
echo Creating $JBR.tar.gz ...
COPYFILE_DISABLE=1 tar -pczf $JBR.tar.gz --exclude='*.dSYM' --exclude='man' -C $BASE_DIR $JBR_BUNDLE || exit $?

View File

@@ -0,0 +1,120 @@
#!/bin/bash
APP_DIRECTORY=$1
APPL_USER=$2
APPL_PASSWORD=$3
APP_NAME=$4
BUNDLE_ID=$5
FAKE_ROOT="${6:-fake-root}"
if [[ -z "$APP_DIRECTORY" ]] || [[ -z "$APPL_USER" ]] || [[ -z "$APPL_PASSWORD" ]]; then
echo "Usage: $0 AppDirectory Username Password"
exit 1
fi
if [[ ! -d "$APP_DIRECTORY" ]]; then
echo "AppDirectory '$APP_DIRECTORY' does not exist or not a directory"
exit 1
fi
function log() {
echo "$(date '+[%H:%M:%S]') $*"
}
function publish-log() {
id=$1
file=$2
curl -T "$file" "$ARTIFACTORY_URL/$id" || true
}
function altool-upload() {
# Since altool uses same file for upload token we have to trick it into using different folders for token file location
# Also it copies zip into TMPDIR so we override it too, to simplify cleanup
OLD_HOME="$HOME"
export HOME="$FAKE_ROOT/home"
export TMPDIR="$FAKE_ROOT/tmp"
mkdir -p "$HOME"
mkdir -p "$TMPDIR"
export _JAVA_OPTIONS="-Duser.home=$HOME -Djava.io.tmpdir=$TMPDIR"
# Reduce amount of downloads, cache transporter libraries
shared_itmstransporter="$OLD_HOME/shared-itmstransporter"
if [[ -f "$shared_itmstransporter" ]]; then
cp -r "$shared_itmstransporter" "$HOME/.itmstransporter"
fi
# For some reason altool prints everything to stderr, not stdout
set +e
xcrun altool --notarize-app \
--username "$APPL_USER" --password "$APPL_PASSWORD" \
--primary-bundle-id "$BUNDLE_ID" \
--asc-provider JetBrainssro --file "$1" 2>&1 | tee "altool.init.out"
unset TMPDIR
export HOME="$OLD_HOME"
set -e
}
#immediately exit script with an error if a command fails
set -euo pipefail
file="$APP_NAME.zip"
log "Zipping $file..."
rm -rf "$file"
ditto -c -k --sequesterRsrc --keepParent "$APP_DIRECTORY/Contents" "$file"
log "Notarizing $file..."
rm -rf "altool.init.out" "altool.check.out"
altool-upload "$file"
rm -rf "$file"
notarization_info="$(grep -e "RequestUUID" "altool.init.out" | grep -oE '([0-9a-f-]{36})')"
if [ -z "$notarization_info" ]; then
log "Faile to read RequestUUID from altool.init.out"
exit 10
fi
PATH="$PATH:/usr/local/bin/"
log "Notarization request sent, awaiting response"
spent=0
while true; do
# For some reason altool prints everything to stderr, not stdout
xcrun altool --username "$APPL_USER" --notarization-info "$notarization_info" --password "$APPL_PASSWORD" >"altool.check.out" 2>&1 || true
status="$(grep -oe 'Status: .*' "altool.check.out" | cut -c 9- || true)"
log "Current status: $status"
if [ "$status" = "invalid" ]; then
log "Notarization failed"
ec=1
elif [ "$status" = "success" ]; then
log "Notarization succeeded"
ec=0
else
if [ "$status" != "in progress" ]; then
log "Unknown notarization status, waiting more, altool output:"
cat "altool.check.out"
fi
if [[ $spent -gt 60 ]]; then
log "Waiting time out (apx 60 minutes)"
ec=2
break
fi
sleep 60
((spent += 1))
continue
fi
developer_log="developer_log.json"
log "Fetching $developer_log"
# TODO: Replace cut with trim or something better
url="$(grep -oe 'LogFileURL: .*' "altool.check.out" | cut -c 13-)"
wget "$url" -O "$developer_log" && cat "$developer_log" || true
if [ $ec != 0 ]; then
log "Publishing $developer_log"
publish-log "$notarization_info" "$developer_log"
fi
break
done
cat "altool.check.out"
rm -rf "altool.init.out" "altool.check.out"
exit $ec

View File

@@ -0,0 +1,93 @@
#!/bin/bash
APP_DIRECTORY=$1
JB_CERT=$2
if [[ -z "$APP_DIRECTORY" ]] || [[ -z "$JB_CERT" ]]; then
echo "Usage: $0 AppDirectory CertificateID"
exit 1
fi
if [[ ! -d "$APP_DIRECTORY" ]]; then
echo "AppDirectory '$APP_DIRECTORY' does not exist or not a directory"
exit 1
fi
function log() {
echo "$(date '+[%H:%M:%S]') $*"
}
#immediately exit script with an error if a command fails
set -euo pipefail
# Cleanup files left from previous sign attempt (if any)
find "$APP_DIRECTORY" -name '*.cstemp' -exec rm '{}' \;
log "Signing libraries and executables..."
# -perm +111 searches for executables
for f in \
"Contents/Home/bin" \
"Contents/Home/lib"; do
if [ -d "$APP_DIRECTORY/$f" ]; then
find "$APP_DIRECTORY/$f" \
-type f \( -name "*.jnilib" -o -name "*.dylib" -o -name "*.so" -o -perm +111 \) \
-exec codesign --timestamp \
-v -s "$JB_CERT" --options=runtime \
--entitlements entitlements.xml {} \;
fi
done
log "Signing libraries in jars in $PWD"
# todo: add set -euo pipefail; into the inner sh -c
# `-e` prevents the `grep -q && printf` logic
# with `-o pipefail` there's no input for 'while' loop
find "$APP_DIRECTORY" -name '*.jar' \
-exec sh -c "set -u; unzip -l \"\$0\" | grep -q -e '\.dylib\$' -e '\.jnilib\$' -e '\.so\$' -e '^jattach\$' && printf \"\$0\0\" " {} \; |
while IFS= read -r -d $'\0' file; do
log "Processing libraries in $file"
rm -rf jarfolder jar.jar
mkdir jarfolder
filename="${file##*/}"
log "Filename: $filename"
cp "$file" jarfolder && (cd jarfolder && jar xf "$filename" && rm "$filename")
find jarfolder \
-type f \( -name "*.jnilib" -o -name "*.dylib" -o -name "*.so" -o -name "jattach" \) \
-exec codesign --timestamp \
-v -s "$JB_CERT" --options=runtime \
--entitlements entitlements.xml {} \;
(cd jarfolder; zip -q -r -o ../jar.jar .)
mv jar.jar "$file"
done
rm -rf jarfolder jar.jar
log "Signing other files..."
for f in \
"Contents/MacOS"; do
if [ -d "$APP_DIRECTORY/$f" ]; then
find "$APP_DIRECTORY/$f" \
-type f \( -name "*.jnilib" -o -name "*.dylib" -o -name "*.so" -o -perm +111 \) \
-exec codesign --timestamp \
-v -s "$JB_CERT" --options=runtime \
--entitlements entitlements.xml {} \;
fi
done
#log "Signing executable..."
#codesign --timestamp \
# -v -s "$JB_CERT" --options=runtime \
# --force \
# --entitlements entitlements.xml "$APP_DIRECTORY/Contents/MacOS/idea"
log "Signing whole app..."
codesign --timestamp \
-v -s "$JB_CERT" --options=runtime \
--force \
--entitlements entitlements.xml "$APP_DIRECTORY"
log "Verifying java is not broken"
find "$APP_DIRECTORY" \
-type f -name 'java' -perm +111 -exec {} -version \;

View File

@@ -0,0 +1,145 @@
#!/bin/bash
#immediately exit script with an error if a command fails
set -euo pipefail
export COPY_EXTENDED_ATTRIBUTES_DISABLE=true
export COPYFILE_DISABLE=true
INPUT_FILE=$1
EXPLODED=$2.exploded
BACKUP_JMODS=$2.backup
USERNAME=$3
PASSWORD=$4
CODESIGN_STRING=$5
NOTARIZE=$6
BUNDLE_ID=$7
cd "$(dirname "$0")"
function log() {
echo "$(date '+[%H:%M:%S]') $*"
}
log "Deleting $EXPLODED ..."
if test -d "$EXPLODED"; then
find "$EXPLODED" -mindepth 1 -maxdepth 1 -exec chmod -R u+wx '{}' \;
fi
rm -rf "$EXPLODED"
mkdir "$EXPLODED"
rm -rf "$BACKUP_JMODS"
mkdir "$BACKUP_JMODS"
log "Unzipping $INPUT_FILE to $EXPLODED ..."
tar -xzvf "$INPUT_FILE" --directory $EXPLODED
rm "$INPUT_FILE"
BUILD_NAME="$(ls "$EXPLODED")"
if test -d $EXPLODED/$BUILD_NAME/Contents/Home/jmods; then
mv $EXPLODED/$BUILD_NAME/Contents/Home/jmods $BACKUP_JMODS
fi
if test -d $EXPLODED/$BUILD_NAME/Contents/Home/Frameworks; then
mv $EXPLODED/$BUILD_NAME/Contents/Home/Frameworks $BACKUP_JMODS
fi
if test -f $EXPLODED/$BUILD_NAME/Contents/MacOS/libjli.dylib; then
mv $EXPLODED/$BUILD_NAME/Contents/MacOS/libjli.dylib $BACKUP_JMODS
fi
#log "$INPUT_FILE unzipped and removed"
log "$INPUT_FILE extracted and removed"
APPLICATION_PATH="$EXPLODED/$BUILD_NAME"
find "$APPLICATION_PATH/Contents/Home/bin" \
-maxdepth 1 -type f -name '*.jnilib' -print0 |
while IFS= read -r -d $'\0' file; do
if [ -f "$file" ]; then
log "Linking $file"
b="$(basename "$file" .jnilib)"
ln -sf "$b.jnilib" "$(dirname "$file")/$b.dylib"
fi
done
find "$APPLICATION_PATH/Contents/" \
-maxdepth 1 -type f -name '*.txt' -print0 |
while IFS= read -r -d $'\0' file; do
if [ -f "$file" ]; then
log "Moving $file"
mv "$file" "$APPLICATION_PATH/Contents/Resources"
fi
done
non_plist=$(find "$APPLICATION_PATH/Contents/" -maxdepth 1 -type f -and -not -name 'Info.plist' | wc -l)
if [[ $non_plist -gt 0 ]]; then
log "Only Info.plist file is allowed in Contents directory but found $non_plist file(s):"
log "$(find "$APPLICATION_PATH/Contents/" -maxdepth 1 -type f -and -not -name 'Info.plist')"
exit 1
fi
log "Unlocking keychain..."
# Make sure *.p12 is imported into local KeyChain
security unlock-keychain -p "$PASSWORD" "/Users/$USERNAME/Library/Keychains/login.keychain"
attempt=1
limit=3
set +e
while [[ $attempt -le $limit ]]; do
log "Signing (attempt $attempt) $APPLICATION_PATH ..."
./sign.sh "$APPLICATION_PATH" "$CODESIGN_STRING"
ec=$?
if [[ $ec -ne 0 ]]; then
((attempt += 1))
if [ $attempt -eq $limit ]; then
set -e
fi
log "Signing failed, wait for 30 sec and try to sign again"
sleep 30
else
log "Signing done"
codesign -v "$APPLICATION_PATH" -vvvvv
log "Check sign done"
((attempt += limit))
fi
done
set -e
if [ "$NOTARIZE" = "yes" ]; then
log "Notarizing..."
# shellcheck disable=SC1090
source "$HOME/.notarize_token"
APP_NAME=$(echo ${INPUT_FILE} | awk -F"." '{ print $1 }')
# Since notarization tool uses same file for upload token we have to trick it into using different folders, hence fake root
# Also it leaves copy of zip file in TMPDIR, so notarize.sh overrides it and uses FAKE_ROOT as location for temp TMPDIR
FAKE_ROOT="$(pwd)/fake-root"
mkdir -p "$FAKE_ROOT"
echo "Notarization will use fake root: $FAKE_ROOT"
./notarize.sh "$APPLICATION_PATH" "$APPLE_USERNAME" "$APPLE_PASSWORD" "$APP_NAME" "$BUNDLE_ID" "$FAKE_ROOT"
rm -rf "$FAKE_ROOT"
set +e
log "Stapling..."
xcrun stapler staple "$APPLICATION_PATH"
else
log "Notarization disabled"
log "Stapling disabled"
fi
log "Zipping $BUILD_NAME to $INPUT_FILE ..."
(
#cd "$EXPLODED"
#ditto -c -k --sequesterRsrc --keepParent "$BUILD_NAME" "../$INPUT_FILE"
if test ! -z $(ls $BACKUP_JMODS/libjli.dylib); then
mv $BACKUP_JMODS/libjli.dylib $EXPLODED/$BUILD_NAME/Contents/MacOS
fi
if test -d $BACKUP_JMODS/jmods; then
mv $BACKUP_JMODS/jmods $EXPLODED/$BUILD_NAME/Contents/Home
fi
if test -d $BACKUP_JMODS/Frameworks; then
mv $BACKUP_JMODS/Frameworks $EXPLODED/$BUILD_NAME/Contents/Home
fi
COPYFILE_DISABLE=1 tar -pczf $INPUT_FILE --exclude='*.dSYM' --exclude='man' -C $EXPLODED $BUILD_NAME
log "Finished zipping"
)
rm -rf "$EXPLODED"
log "Done"

View File

@@ -0,0 +1,9 @@
#!/bin/bash -x
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
script_dir=jb/project/tools/windows/scripts
${script_dir}/mkimages_x64.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jcef" || exit $?
${script_dir}/mkimages_x64.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jfx" || exit $?
${script_dir}/mkimages_x64.sh $JBSDK_VERSION $JDK_BUILD_NUMBER $build_number "jfx_jcef" || exit $?

View File

@@ -0,0 +1,119 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - the bundle 1) jbr with javafx only will be created
#
# jbrsdk-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-osx-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
bundle_type=$4
function create_jbr {
case "$1" in
"${bundle_type}_lw")
grep -v "jdk.compiler\|jdk.hotspot.agent" modules.list > modules_tmp.list
;;
"jfx" | "jcef" | "jfx_jcef")
cat modules.list > modules_tmp.list
;;
*)
cat modules.list > modules_tmp.list
;;
esac
rm -rf ${JBR_BUNDLE}
${JSDK}/bin/jlink \
--module-path ${JSDK}/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules_tmp.list | sed s/" "//g) --output ${JBR_BUNDLE} || exit $?
if [[ "${bundle_type}" == *jcef* ]]
then
cp -R jcef_win_x64/* ${JBR_BUNDLE}/bin
fi
echo Modifying release info ...
grep -v \"^JAVA_VERSION\" ${JSDK}/release | grep -v \"^MODULES\" >> ${JBR_BUNDLE}/release
}
JBRSDK_BASE_NAME=jbrsdk-${JBSDK_VERSION}
WORK_DIR=$(pwd)
#git checkout -- modules.list src
case "$bundle_type" in
"jfx")
echo "Excluding jcef modules"
git apply -p0 < jb/project/tools/exclude_jcef_module.patch
;;
"jcef")
echo "Excluding jfx modules"
git apply -p0 < jb/project/tools/exclude_jfx_module.patch
;;
esac
PATH="/usr/local/bin:/usr/bin:${PATH}"
if [ -z "$bundle_type" ]; then
bash ./configure \
--disable-warnings-as-errors \
--with-target-bits=64 \
--with-version-pre= \
--with-version-build=${JDK_BUILD_NUMBER} \
--with-version-opt=b${build_number} \
--with-toolchain-version=${TOOLCHAIN_VERSION} \
--with-boot-jdk=${BOOT_JDK} \
--disable-ccache \
--enable-cds=yes || exit 1
else
bash ./configure \
--disable-warnings-as-errors \
--with-target-bits=64 \
--with-version-pre= \
--with-version-build=${JDK_BUILD_NUMBER} \
--with-version-opt=b${build_number} \
--with-import-modules=${WORK_DIR}/modular-sdk \
--with-toolchain-version=${TOOLCHAIN_VERSION} \
--with-boot-jdk=${BOOT_JDK} \
--disable-ccache \
--enable-cds=yes || exit 1
fi
if [[ "$bundle_type" == "jfx_jcef" || -z "$bundle_type" ]]; then
make LOG=info images CONF=windows-x86_64-server-release test-image || exit 1
else
make LOG=info images CONF=windows-x86_64-server-release || exit 1
fi
JSDK=build/windows-x86_64-server-release/images/jdk
if [[ "$bundle_type" == "*jcef*" || -z "$bundle_type" ]]; then
JBSDK=${JBRSDK_BASE_NAME}-windows-x64-b${build_number}
fi
BASE_DIR=build/windows-x86_64-server-release/images
JBRSDK_BUNDLE=jbrsdk
rm -rf ${BASE_DIR}/${JBRSDK_BUNDLE} && rsync -a --exclude demo --exclude sample ${JSDK}/ ${JBRSDK_BUNDLE} || exit 1
if [[ "$bundle_type" == "*jcef*" ]]; then
cp -R jcef_win_x64/* ${JBRSDK_BUNDLE}/bin
fi
if [ -z "$bundle_type" ]; then
JBR_BUNDLE=jbr
else
JBR_BUNDLE=jbr_${bundle_type}
fi
create_jbr ${bundle_type}
#JBR_BUNDLE=jbr_${bundle_type}_lw
#create_jbr ${bundle_type}_lw

View File

@@ -0,0 +1,57 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - only the jbr bundle (with javafx) will be created
#
# jbrsdk-${JBSDK_VERSION}-windows-x86-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-windows-x86-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
JBRSDK_BASE_NAME=jbrsdk-${JBSDK_VERSION}
WORK_DIR=$(pwd)
PATH="/usr/local/bin:/usr/bin:${PATH}"
./configure \
--disable-warnings-as-errors \
--disable-debug-symbols \
--with-target-bits=32 \
--with-version-pre= \
--with-version-build=${JDK_BUILD_NUMBER} \
--with-version-opt=b${build_number} \
--with-toolchain-version=2015 \
--with-boot-jdk=${BOOT_JDK} \
--disable-ccache \
--enable-cds=yes || exit 1
make clean CONF=windows-x86-normal-server-release || exit 1
make LOG=info images CONF=windows-x86-normal-server-release test-image || exit 1
JBSDK=${JBRSDK_BASE_NAME}-windows-x86-b${build_number}
BASE_DIR=build/windows-x86-normal-server-release/images
JSDK=${BASE_DIR}/jdk
JBRSDK_BUNDLE=jbrsdk
rm -rf ${BASE_DIR}/${JBRSDK_BUNDLE} && rsync -a --exclude demo --exclude sample ${JSDK}/ ${JBRSDK_BUNDLE} || exit 1
JBR_BUNDLE=jbr
rm -rf ${JBR_BUNDLE}
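# The 32-bit runtime is linked from a reduced module list: the grep below drops the javafx,
# jdk.internal.vm/jdk.aot and jcef entries from modules.list.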
grep -v javafx modules.list | grep -v "jdk.internal.vm\|jdk.aot\|jcef" > modules.list.x86
${JSDK}/bin/jlink \
--module-path ${JSDK}/jmods --no-man-pages --compress=2 \
--add-modules $(xargs < modules.list.x86 | sed s/" "//g) --output ${JBR_BUNDLE} || exit $?
echo Modifying release info ...
grep -v \"^JAVA_VERSION\" ${JSDK}/release | grep -v \"^MODULES\" >> ${JBR_BUNDLE}/release

View File

@@ -0,0 +1,79 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - only the jbr bundle (with javafx) will be created
#
# jbrsdk-${JBSDK_VERSION}-windows-x64-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-windows-x64-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
bundle_type=$4
function pack_jbr {
case "$1" in
"${bundle_type}_lw")
JBR_BASE_NAME=jbr_${bundle_type}_lw-${JBSDK_VERSION}
;;
"jfx" | "jcef")
JBR_BASE_NAME=jbr_${bundle_type}-${JBSDK_VERSION}
;;
"jfx_jcef" | "")
JBR_BASE_NAME=jbr-${JBSDK_VERSION}
;;
*)
echo "***ERR*** bundle was not specified" && exit 1
;;
esac
JBR=$JBR_BASE_NAME-windows-x64-b$build_number
echo Creating $JBR.tar.gz ...
if [ ! -z "$bundle_type" ]; then
rm -rf ${BASE_DIR}/jbr
cp -R ${BASE_DIR}/${JBR_BUNDLE} ${BASE_DIR}/jbr
fi
/usr/bin/tar -czf $JBR.tar.gz -C $BASE_DIR jbr || exit 1
#rm -rf ${BASE_DIR}/${JBR_BUNDLE}
}
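# For illustration only (placeholder values): with bundle_type=jcef, JBSDK_VERSION=15_0_1 and
# build_number=1234, pack_jbr produces jbr_jcef-15_0_1-windows-x64-b1234.tar.gz.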
JBRSDK_BASE_NAME=jbrsdk-$JBSDK_VERSION
JBR_BASE_NAME=jbr-$JBSDK_VERSION
IMAGES_DIR=build/windows-x86_64-server-release/images
JSDK=$IMAGES_DIR/jdk
JBSDK=$JBRSDK_BASE_NAME-windows-x64-b$build_number
BASE_DIR=.
if [ -z "$bundle_type" ]; then
JBR_BUNDLE=jbr
else
JBR_BUNDLE=jbr_${bundle_type}
fi
if [[ "$bundle_type" == "jfx_jcef" || -z "$bundle_type" ]]; then
JBRSDK_BUNDLE=jbrsdk
echo Creating $JBSDK.tar.gz ...
/usr/bin/tar -czf $JBSDK.tar.gz $JBRSDK_BUNDLE || exit 1
fi
JBR_BUNDLE=jbr_${bundle_type}
pack_jbr $bundle_type
if [[ "$bundle_type" == "jfx_jcef" || -z "$bundle_type" ]]; then
JBRSDK_TEST=$JBRSDK_BASE_NAME-windows-test-x64-b$build_number
echo Creating $JBRSDK_TEST.tar.gz ...
/usr/bin/tar -czf $JBRSDK_TEST.tar.gz -C $IMAGES_DIR --exclude='test/jdk/demos' test || exit 1
fi

View File

@@ -0,0 +1,45 @@
#!/bin/bash -x
# The following parameters must be specified:
# JBSDK_VERSION - specifies the current version of OpenJDK e.g. 11_0_6
# JDK_BUILD_NUMBER - specifies the number of OpenJDK build or the value of --with-version-build argument to configure
# build_number - specifies the number of JetBrainsRuntime build
# bundle_type - specifies the bundle to be built; possible values:
# jcef - the bundles 1) jbr with jcef+javafx, 2) jbrsdk and 3) test will be created
# jfx - only the jbr bundle (with javafx) will be created
#
# jbrsdk-${JBSDK_VERSION}-windows-x86-b${build_number}.tar.gz
# jbr-${JBSDK_VERSION}-windows-x86-b${build_number}.tar.gz
#
# $ ./java --version
# openjdk 11.0.6 2020-01-14
# OpenJDK Runtime Environment (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number})
# OpenJDK 64-Bit Server VM (build 11.0.6+${JDK_BUILD_NUMBER}-b${build_number}, mixed mode)
#
JBSDK_VERSION=$1
JDK_BUILD_NUMBER=$2
build_number=$3
JBRSDK_BASE_NAME=jbrsdk-$JBSDK_VERSION
JBR_BASE_NAME=jbr-$JBSDK_VERSION
IMAGES_DIR=build/windows-x86-normal-server-release/images
JSDK=$IMAGES_DIR/jdk
JBSDK=$JBRSDK_BASE_NAME-windows-x86-b$build_number
BASE_DIR=.
JBRSDK_BUNDLE=jbrsdk
echo Creating $JBSDK.tar.gz ...
/usr/bin/tar -czf $JBSDK.tar.gz $JBRSDK_BUNDLE || exit 1
JBR_BUNDLE=jbr
JBR_BASE_NAME=jbr-${JBSDK_VERSION}
JBR=$JBR_BASE_NAME-windows-x86-b$build_number
echo Creating $JBR.tar.gz ...
/usr/bin/tar -czf $JBR.tar.gz -C $BASE_DIR ${JBR_BUNDLE} || exit 1
JBRSDK_TEST=$JBRSDK_BASE_NAME-windows-test-x86-b$build_number
echo Creating $JBRSDK_TEST.tar.gz ...
/usr/bin/tar -czf $JBRSDK_TEST.tar.gz -C $IMAGES_DIR --exclude='test/jdk/demos' test || exit 1

View File

@@ -242,6 +242,16 @@ ifneq ($(filter product-bundles% legacy-bundles, $(MAKECMDGOALS)), )
)
JDK_SYMBOLS_BUNDLE_FILES := \
$(filter \
$(JDK_SYMBOLS_EXCLUDE_PATTERN) \
$(SYMBOLS_EXCLUDE_PATTERN) \
, \
$(filter-out \
$(JDK_IMAGE_HOMEDIR)/demo/% %.stripped.pdb \
, \
$(ALL_JDK_SYMBOLS_FILES) \
) \
) \
$(call FindFiles, $(SYMBOLS_IMAGE_DIR))
TEST_DEMOS_BUNDLE_FILES := $(filter $(JDK_DEMOS_IMAGE_HOMEDIR)/demo/%, \
@@ -373,7 +383,7 @@ ifneq ($(filter product-bundles% legacy-bundles, $(MAKECMDGOALS)), )
$(eval $(call SetupBundleFile, BUILD_JDK_SYMBOLS_BUNDLE, \
BUNDLE_NAME := $(JDK_SYMBOLS_BUNDLE_NAME), \
FILES := $(JDK_SYMBOLS_BUNDLE_FILES), \
BASE_DIRS := $(SYMBOLS_IMAGE_DIR), \
BASE_DIRS := $(JDK_SYMBOLS_IMAGE_DIR) $(wildcard $(SYMBOLS_IMAGE_DIR)), \
SUBDIR := $(JDK_BUNDLE_SUBDIR), \
UNZIP_DEBUGINFO := true, \
))
@@ -410,43 +420,17 @@ endif
################################################################################
ifneq ($(filter docs-jdk-bundles, $(MAKECMDGOALS)), )
DOCS_JDK_BUNDLE_FILES := $(call FindFiles, $(DOCS_JDK_IMAGE_DIR))
ifneq ($(filter docs-bundles, $(MAKECMDGOALS)), )
DOCS_BUNDLE_FILES := $(call FindFiles, $(DOCS_IMAGE_DIR))
$(eval $(call SetupBundleFile, BUILD_DOCS_JDK_BUNDLE, \
BUNDLE_NAME := $(DOCS_JDK_BUNDLE_NAME), \
FILES := $(DOCS_JDK_BUNDLE_FILES), \
BASE_DIRS := $(DOCS_JDK_IMAGE_DIR), \
$(eval $(call SetupBundleFile, BUILD_DOCS_BUNDLE, \
BUNDLE_NAME := $(DOCS_BUNDLE_NAME), \
FILES := $(DOCS_BUNDLE_FILES), \
BASE_DIRS := $(DOCS_IMAGE_DIR), \
SUBDIR := docs, \
))
DOCS_JDK_TARGETS += $(BUILD_DOCS_JDK_BUNDLE)
endif
ifneq ($(filter docs-javase-bundles, $(MAKECMDGOALS)), )
DOCS_JAVASE_BUNDLE_FILES := $(call FindFiles, $(DOCS_JAVASE_IMAGE_DIR))
$(eval $(call SetupBundleFile, BUILD_DOCS_JAVASE_BUNDLE, \
BUNDLE_NAME := $(DOCS_JAVASE_BUNDLE_NAME), \
FILES := $(DOCS_JAVASE_BUNDLE_FILES), \
BASE_DIRS := $(DOCS_JAVASE_IMAGE_DIR), \
SUBDIR := docs-javase, \
))
DOCS_JAVASE_TARGETS += $(BUILD_DOCS_JAVASE_BUNDLE)
endif
ifneq ($(filter docs-reference-bundles, $(MAKECMDGOALS)), )
DOCS_REFERENCE_BUNDLE_FILES := $(call FindFiles, $(DOCS_REFERENCE_IMAGE_DIR))
$(eval $(call SetupBundleFile, BUILD_DOCS_REFERENCE_BUNDLE, \
BUNDLE_NAME := $(DOCS_REFERENCE_BUNDLE_NAME), \
FILES := $(DOCS_REFERENCE_BUNDLE_FILES), \
BASE_DIRS := $(DOCS_REFERENCE_IMAGE_DIR), \
SUBDIR := docs-reference, \
))
DOCS_REFERENCE_TARGETS += $(BUILD_DOCS_REFERENCE_BUNDLE)
DOCS_TARGETS += $(BUILD_DOCS_BUNDLE)
endif
################################################################################
@@ -495,12 +479,9 @@ $(eval $(call IncludeCustomExtension, Bundles.gmk))
product-bundles: $(PRODUCT_TARGETS)
legacy-bundles: $(LEGACY_TARGETS)
test-bundles: $(TEST_TARGETS)
docs-jdk-bundles: $(DOCS_JDK_TARGETS)
docs-javase-bundles: $(DOCS_JAVASE_TARGETS)
docs-reference-bundles: $(DOCS_REFERENCE_TARGETS)
docs-bundles: $(DOCS_TARGETS)
static-libs-bundles: $(STATIC_LIBS_TARGETS)
jcov-bundles: $(JCOV_TARGETS)
.PHONY: all default product-bundles test-bundles \
docs-jdk-bundles docs-javase-bundles docs-reference-bundles \
.PHONY: all default product-bundles test-bundles docs-bundles \
static-libs-bundles jcov-bundles

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -45,6 +45,11 @@ $(OUTPUTDIR)/compile_commands.json: $(wildcard $(MAKESUPPORT_OUTPUTDIR)/compile-
$(RM) $@
$(FIND) $(MAKESUPPORT_OUTPUTDIR)/compile-commands/ -name \*.json | \
$(SORT) | $(XARGS) $(CAT) >> $@.tmp
$(if $(FIXPATH),$(FIXPATH) $(AWK) 'BEGIN { \
tmpfile = substr(ARGV[2],2); \
cmd = "$(CP) " "\047" tmpfile "\047" " $@.tmp"; \
system(cmd); \
}' -- @$@.tmp)
$(SED) -e '1s/^/[\$(NEWLINE)/' -e '$(DOLLAR)s/,\s\{0,\}$(DOLLAR)/\$(NEWLINE)]/' $@.tmp > $@
$(RM) $@.tmp

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2014, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -49,9 +49,8 @@ TARGETS += $(patsubst %, $(BUILDTOOLS_OUTPUTDIR)/gensrc/%/module-info.java, \
$(INTERIM_LANGTOOLS_MODULES))
$(eval $(call SetupCopyFiles, COPY_PREVIEW_FEATURES, \
FILES := $(TOPDIR)/src/java.base/share/classes/jdk/internal/javac/PreviewFeature.java \
$(TOPDIR)/src/java.base/share/classes/jdk/internal/javac/NoPreview.java, \
DEST := $(BUILDTOOLS_OUTPUTDIR)/gensrc/java.base.interim/jdk/internal/javac/, \
FILES := $(TOPDIR)/src/java.base/share/classes/jdk/internal/PreviewFeature.java, \
DEST := $(BUILDTOOLS_OUTPUTDIR)/gensrc/java.base.interim/jdk/internal/, \
))
TARGETS += $(COPY_PREVIEW_FEATURES)
@@ -75,15 +74,15 @@ define SetupInterimModule
EXCLUDE_FILES := $(TOPDIR)/src/$1/share/classes/module-info.java \
Standard.java, \
EXTRA_FILES := $(BUILDTOOLS_OUTPUTDIR)/gensrc/$1.interim/module-info.java, \
COPY := .gif .png .xml .css .js .js.template .txt javax.tools.JavaCompilerTool, \
COPY := .gif .png .xml .css .js javax.tools.JavaCompilerTool, \
BIN := $(BUILDTOOLS_OUTPUTDIR)/interim_langtools_modules/$1.interim, \
DISABLED_WARNINGS := module options, \
JAVAC_FLAGS := \
--module-path $(BUILDTOOLS_OUTPUTDIR)/interim_langtools_modules \
$$(INTERIM_LANGTOOLS_ADD_EXPORTS) \
--patch-module java.base=$(BUILDTOOLS_OUTPUTDIR)/gensrc/java.base.interim \
--add-exports java.base/jdk.internal.javac=java.compiler.interim \
--add-exports java.base/jdk.internal.javac=jdk.compiler.interim, \
--add-exports java.base/jdk.internal=java.compiler.interim \
--add-exports java.base/jdk.internal=jdk.compiler.interim, \
))
$1_DEPS_INTERIM := $$(addsuffix .interim, $$(filter \

View File

@@ -31,6 +31,507 @@ include MakeBase.gmk
include Modules.gmk
include JavaCompilation.gmk
# Hook to include the corresponding custom file, if present.
$(eval $(call IncludeCustomExtension, CompileJavaModules.gmk))
################################################################################
# Module specific build settings
java.base_DOCLINT += -Xdoclint:all/protected,-reference,-accessibility \
'-Xdoclint/package:java.*,javax.*'
java.base_JAVAC_FLAGS += -XDstringConcat=inline
java.base_COPY += .icu .dat .spp .nrm content-types.properties \
hijrah-config-Hijrah-umalqura_islamic-umalqura.properties
java.base_CLEAN += intrinsic.properties
java.base_EXCLUDE_FILES += \
$(TOPDIR)/src/java.base/share/classes/jdk/internal/module/ModuleLoaderMap.java
java.base_EXCLUDES += java/lang/doc-files
# Exclude BreakIterator classes that are just used in compile process to generate
# data files and shouldn't go in the product
java.base_EXCLUDE_FILES += sun/text/resources/BreakIteratorRules.java
ifeq ($(call isTargetOs, macosx aix), false)
java.base_EXCLUDE_FILES += sun/nio/fs/PollingWatchService.java
endif
ifeq ($(call isTargetOs, windows), true)
java.base_EXCLUDE_FILES += \
sun/nio/ch/SimpleAsynchronousFileChannelImpl.java \
#
endif
################################################################################
java.compiler_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.datatransfer_DOCLINT += -Xdoclint:all/protected,-reference \
'-Xdoclint/package:java.*,javax.*'
java.datatransfer_COPY += flavormap.properties
################################################################################
java.desktop_DOCLINT += -Xdoclint:all/protected,-reference \
'-Xdoclint/package:java.*,javax.*'
java.desktop_COPY += .gif .png .wav .txt .xml .css .pf
java.desktop_CLEAN += iio-plugin.properties cursors.properties
java.desktop_EXCLUDES += \
java/awt/doc-files \
javax/swing/doc-files \
javax/swing/text/doc-files \
javax/swing/plaf/synth/doc-files \
javax/swing/undo/doc-files \
sun/awt/X11/doc-files \
#
java.desktop_EXCLUDE_FILES += \
javax/swing/plaf/nimbus/InternalFrameTitlePanePainter.java \
javax/swing/plaf/nimbus/OptionPaneMessageAreaPainter.java \
javax/swing/plaf/nimbus/ScrollBarPainter.java \
javax/swing/plaf/nimbus/SliderPainter.java \
javax/swing/plaf/nimbus/SpinnerPainter.java \
javax/swing/plaf/nimbus/SplitPanePainter.java \
javax/swing/plaf/nimbus/TabbedPanePainter.java \
sun/awt/resources/security-icon-bw16.png \
sun/awt/resources/security-icon-bw24.png \
sun/awt/resources/security-icon-bw32.png \
sun/awt/resources/security-icon-bw48.png \
sun/awt/resources/security-icon-interim16.png \
sun/awt/resources/security-icon-interim24.png \
sun/awt/resources/security-icon-interim32.png \
sun/awt/resources/security-icon-interim48.png \
sun/awt/resources/security-icon-yellow16.png \
sun/awt/resources/security-icon-yellow24.png \
sun/awt/resources/security-icon-yellow32.png \
sun/awt/resources/security-icon-yellow48.png \
sun/awt/X11/java-icon16.png \
sun/awt/X11/java-icon24.png \
sun/awt/X11/java-icon32.png \
sun/awt/X11/java-icon48.png \
.template \
#
ifeq ($(call isTargetOs, macosx), true)
# exclude all X11 on Mac.
java.desktop_EXCLUDES += \
sun/awt/X11 \
sun/java2d/x11 \
sun/java2d/jules \
sun/java2d/xr \
com/sun/java/swing/plaf/gtk \
#
java.desktop_EXCLUDE_FILES += \
$(wildcard $(TOPDIR)/src/java.desktop/unix/classes/sun/java2d/*.java) \
$(wildcard $(TOPDIR)/src/java.desktop/unix/classes/sun/java2d/opengl/*.java) \
$(wildcard $(TOPDIR)/src/java.desktop/unix/classes/sun/awt/*.java) \
$(wildcard $(TOPDIR)/src/java.desktop/unix/classes/sun/font/*.java) \
#
else
# TBD: figure out how to eliminate this long list
java.desktop_EXCLUDE_FILES += \
sun/awt/X11/ScreenFormat.java \
sun/awt/X11/XArc.java \
sun/awt/X11/XChar2b.java \
sun/awt/X11/XCharStruct.java \
sun/awt/X11/XClassHint.java \
sun/awt/X11/XComposeStatus.java \
sun/awt/X11/XExtCodes.java \
sun/awt/X11/XFontProp.java \
sun/awt/X11/XFontSetExtents.java \
sun/awt/X11/XFontStruct.java \
sun/awt/X11/XGCValues.java \
sun/awt/X11/XHostAddress.java \
sun/awt/X11/XIMCallback.java \
sun/awt/X11/XIMHotKeyTrigger.java \
sun/awt/X11/XIMHotKeyTriggers.java \
sun/awt/X11/XIMPreeditCaretCallbackStruct.java \
sun/awt/X11/XIMPreeditDrawCallbackStruct.java \
sun/awt/X11/XIMPreeditStateNotifyCallbackStruct.java \
sun/awt/X11/XIMStatusDrawCallbackStruct.java \
sun/awt/X11/XIMStringConversionCallbackStruct.java \
sun/awt/X11/XIMStringConversionText.java \
sun/awt/X11/XIMStyles.java \
sun/awt/X11/XIMText.java \
sun/awt/X11/XIMValuesList.java \
sun/awt/X11/XImage.java \
sun/awt/X11/XKeyboardControl.java \
sun/awt/X11/XKeyboardState.java \
sun/awt/X11/XOMCharSetList.java \
sun/awt/X11/XOMFontInfo.java \
sun/awt/X11/XOMOrientation.java \
sun/awt/X11/XPoint.java \
sun/awt/X11/XRectangle.java \
sun/awt/X11/XSegment.java \
sun/awt/X11/XStandardColormap.java \
sun/awt/X11/XTextItem.java \
sun/awt/X11/XTextItem16.java \
sun/awt/X11/XTextProperty.java \
sun/awt/X11/XTimeCoord.java \
sun/awt/X11/XWindowChanges.java \
sun/awt/X11/XdbeSwapInfo.java \
sun/awt/X11/XmbTextItem.java \
sun/awt/X11/XwcTextItem.java
endif
ifeq ($(call isTargetOs, windows), true)
java.desktop_EXCLUDES += com/sun/java/swing/plaf/gtk
endif
ifdef BUILD_HEADLESS_ONLY
java.desktop_EXCLUDES += sun/applet
endif
ifeq ($(call isTargetOs, windows macosx), false)
java.desktop_EXCLUDE_FILES += sun/awt/AWTCharset.java
endif
# These files do not appear in the build result of the old build. This
# is because they are generated sources, but the AUTO_JAVA_FILES won't
# pick them up since they aren't generated when the source dirs are
# searched and they aren't referenced by any other classes so they won't
# be picked up by implicit compilation. On a rebuild, they are picked up
# and compiled. Exclude them here to produce the same rt.jar as the old
# build does when building just once.
java.desktop_EXCLUDE_FILES += \
javax/swing/plaf/nimbus/InternalFrameTitlePanePainter.java \
javax/swing/plaf/nimbus/OptionPaneMessageAreaPainter.java \
javax/swing/plaf/nimbus/ScrollBarPainter.java \
javax/swing/plaf/nimbus/SliderPainter.java \
javax/swing/plaf/nimbus/SpinnerPainter.java \
javax/swing/plaf/nimbus/SplitPanePainter.java \
javax/swing/plaf/nimbus/TabbedPanePainter.java \
#
################################################################################
java.scripting_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:java.*,javax.*'
java.scripting_COPY += .js
java.scripting_CLEAN += .properties
################################################################################
java.instrument_DOCLINT += -Xdoclint:all/protected,-accessibility \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.logging_DOCLINT += -Xdoclint:all/protected,-reference,-accessibility \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.management_DOCLINT += -Xdoclint:all/protected,-reference,-accessibility \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.management.rmi_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:javax.*'
################################################################################
java.prefs_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.transaction.xa_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:javax.*'
################################################################################
java.sql_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.sql.rowset_DOCLINT += -Xdoclint:all/protected,-accessibility \
'-Xdoclint/package:java.*,javax.*'
java.sql.rowset_CLEAN_FILES += $(wildcard \
$(TOPDIR)/src/java.sql.rowset/share/classes/com/sun/rowset/*.properties \
$(TOPDIR)/src/java.sql.rowset/share/classes/javax/sql/rowset/*.properties)
################################################################################
java.rmi_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:java.*,javax.*'
java.rmi_CLEAN_FILES += $(wildcard \
$(TOPDIR)/src/java.rmi/share/classes/sun/rmi/registry/resources/*.properties \
$(TOPDIR)/src/java.rmi/share/classes/sun/rmi/server/resources/*.properties)
################################################################################
java.xml_DOCLINT += -Xdoclint:all/protected,-accessibility \
'-Xdoclint/package:$(call CommaList, javax.xml.catalog javax.xml.datatype \
javax.xml.transform javax.xml.validation javax.xml.xpath)'
java.xml_CLEAN += .properties
################################################################################
java.naming_DOCLINT += -Xdoclint:all/protected,-accessibility \
'-Xdoclint/package:java.*,javax.*'
java.naming_CLEAN += jndiprovider.properties
################################################################################
java.security.jgss_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.smartcardio_DOCLINT += -Xdoclint:all/protected,-accessibility \
'-Xdoclint/package:java.*,javax.*'
################################################################################
java.xml.crypto_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:java.*,javax.*'
java.xml.crypto_COPY += .dtd .xml
java.xml.crypto_CLEAN += .properties
################################################################################
jdk.charsets_COPY += .dat
################################################################################
################################################################################
jdk.compiler_DOCLINT += -Xdoclint:all/protected \
'-Xdoclint/package:-com.sun.tools.*,-jdk.internal.*,sun.tools.serialver.resources.*'
jdk.compiler_JAVAC_FLAGS += -XDstringConcat=inline
jdk.compiler_CLEAN_FILES += $(wildcard \
$(patsubst %, $(TOPDIR)/src/jdk.compiler/share/classes/%/*.properties, \
sun/tools/serialver/resources))
################################################################################
jdk.hotspot.agent_DISABLED_WARNINGS += rawtypes serial cast static overrides \
fallthrough
jdk.hotspot.agent_COPY += .gif .png .properties
################################################################################
jdk.editpad_COPY += .properties
################################################################################
jdk.jshell_COPY += .jsh .properties
################################################################################
jdk.internal.le_COPY += .properties .caps .txt
################################################################################
jdk.internal.opt_COPY += .properties
################################################################################
jdk.jcmd_COPY += _options
################################################################################
jdk.dynalink_CLEAN += .properties
################################################################################
jdk.javadoc_COPY += .xml .css .js .png
################################################################################
jdk.jartool_JAVAC_FLAGS += -XDstringConcat=inline
################################################################################
# No SCTP implementation on Mac OS X or AIX. These classes should be excluded.
SCTP_IMPL_CLASSES = \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/AssociationChange.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/AssociationImpl.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/PeerAddrChange.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/ResultContainer.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/SctpChannelImpl.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/SctpMultiChannelImpl.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/SctpNet.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/SctpNotification.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/SctpServerChannelImpl.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/SendFailed.java \
$(TOPDIR)/src/jdk.sctp/unix/classes/sun/nio/ch/sctp/Shutdown.java
ifeq ($(call isTargetOs, macosx), true)
jdk.sctp_EXCLUDE_FILES += $(SCTP_IMPL_CLASSES)
endif
ifeq ($(call isTargetOs, aix), true)
jdk.sctp_EXCLUDE_FILES += $(SCTP_IMPL_CLASSES)
endif
################################################################################
jdk.incubator.jpackage_COPY += .gif .png .txt .spec .script .prerm .preinst \
.postrm .postinst .list .sh .desktop .copyright .control .plist .template \
.icns .scpt .wxs .wxl .wxi .ico .bmp .tiff
jdk.incubator.jpackage_CLEAN += .properties
################################################################################
jdk.jconsole_COPY += .gif .png
jdk.jconsole_CLEAN_FILES += $(wildcard \
$(TOPDIR)/src/jdk.jconsole/share/classes/sun/tools/jconsole/resources/*.properties)
################################################################################
jdk.jdeps_COPY += .txt
jdk.jdeps_CLEAN_FILES += $(wildcard \
$(TOPDIR)/src/jdk.jdeps/share/classes/com/sun/tools/jdeps/resources/*.properties \
$(TOPDIR)/src/jdk.jdeps/share/classes/com/sun/tools/javap/resources/*.properties)
################################################################################
jdk.jdi_EXCLUDES += \
com/sun/tools/example/debug/bdi \
com/sun/tools/example/debug/event \
com/sun/tools/example/debug/gui \
com/sun/jdi/doc-files \
#
jdk.jdi_EXCLUDE_FILES += jdi-overview.html
################################################################################
jdk.dev_CLEAN_FILES += $(wildcard \
$(patsubst %, $(TOPDIR)/src/jdk.dev/share/classes/%/*.properties, \
com/sun/tools/script/shell))
jdk.dev_COPY += .js oqlhelp.html .txt
################################################################################
jdk.internal.jvmstat_COPY += aliasmap
################################################################################
# -parameters provides method's parameters information in class file,
# JVMCI compilers make use of that information for various sanity checks.
# Don't use Indy strings concatenation to have good JVMCI startup performance.
# The exports are needed since JVMCI is dynamically exported (see
# jdk.vm.ci.services.internal.ReflectionAccessJDK::openJVMCITo).
jdk.internal.vm.ci_JAVAC_FLAGS += -parameters -XDstringConcat=inline
################################################################################
jdk.internal.vm.compiler_JAVAC_FLAGS += -parameters -XDstringConcat=inline \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.aarch64=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.amd64=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.code=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.code.site=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.code.stack=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.common=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.hotspot=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.hotspot.aarch64=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.hotspot.amd64=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.meta=jdk.internal.vm.compiler \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.runtime=jdk.internal.vm.compiler \
#
jdk.internal.vm.compiler_EXCLUDES += \
jdk.internal.vm.compiler.collections.test \
jdk.tools.jaotc.test \
org.graalvm.compiler.api.directives.test \
org.graalvm.compiler.api.test \
org.graalvm.compiler.asm.aarch64.test \
org.graalvm.compiler.asm.amd64.test \
org.graalvm.compiler.asm.test \
org.graalvm.compiler.core.aarch64.test \
org.graalvm.compiler.core.amd64.test \
org.graalvm.compiler.core.jdk9.test \
org.graalvm.compiler.core.match.processor \
org.graalvm.compiler.core.test \
org.graalvm.compiler.debug.test \
org.graalvm.compiler.graph.test \
org.graalvm.compiler.hotspot.aarch64.test \
org.graalvm.compiler.hotspot.amd64.test \
org.graalvm.compiler.hotspot.jdk15.test \
org.graalvm.compiler.hotspot.jdk9.test \
org.graalvm.compiler.hotspot.lir.test \
org.graalvm.compiler.hotspot.test \
org.graalvm.compiler.jtt \
org.graalvm.compiler.lir.jtt \
org.graalvm.compiler.lir.test \
org.graalvm.compiler.loop.test \
org.graalvm.compiler.microbenchmarks \
org.graalvm.compiler.nodeinfo.processor \
org.graalvm.compiler.nodes.test \
org.graalvm.compiler.options.processor \
org.graalvm.compiler.options.test \
org.graalvm.compiler.phases.common.test \
org.graalvm.compiler.processor \
org.graalvm.compiler.replacements.jdk10.test \
org.graalvm.compiler.replacements.jdk12.test \
org.graalvm.compiler.replacements.jdk9.test \
org.graalvm.compiler.replacements.processor \
org.graalvm.compiler.replacements.test \
org.graalvm.compiler.serviceprovider.processor \
org.graalvm.compiler.test \
org.graalvm.compiler.virtual.bench \
org.graalvm.micro.benchmarks \
org.graalvm.util.test \
#
################################################################################
# -parameters provides method's parameters information in class file,
# JVMCI compilers make use of that information for various sanity checks.
# Don't use Indy strings concatenation to have good JAOTC startup performance.
# The exports are needed since JVMCI is dynamically exported (see
# jdk.vm.ci.services.internal.ReflectionAccessJDK::openJVMCITo).
jdk.aot_JAVAC_FLAGS += -parameters -XDstringConcat=inline \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.aarch64=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.amd64=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.code=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.code.site=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.code.stack=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.common=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.hotspot=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.hotspot.aarch64=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.hotspot.amd64=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.meta=jdk.internal.vm.compiler,jdk.aot \
--add-exports jdk.internal.vm.ci/jdk.vm.ci.runtime=jdk.internal.vm.compiler,jdk.aot \
#
jdk.aot_EXCLUDES += \
jdk.tools.jaotc.test
#
################################################################################
sun.charsets_COPY += .dat
################################################################################
jdk.localedata_COPY += _dict _th
# Exclude BreakIterator classes that are just used in compile process to generate
# data files and shouldn't go in the product
jdk.localedata_EXCLUDE_FILES += sun/text/resources/ext/BreakIteratorRules_th.java
jdk.localedata_KEEP_ALL_TRANSLATIONS := true
################################################################################
jdk.jfr_DISABLED_WARNINGS += exports
jdk.jfr_COPY := .xsd .xml .dtd
jdk.jfr_JAVAC_FLAGS := -XDstringConcat=inline
################################################################################
# If this is an imported module that has prebuilt classes, only compile
# module-info.java.
@@ -67,8 +568,60 @@ MODULESOURCEPATH := $(call GetModuleSrcPath)
# Add imported modules to the modulepath
MODULEPATH := $(call PathList, $(IMPORT_MODULES_CLASSES))
ifeq ($(MODULE), jdk.internal.vm.ci)
## WORKAROUND jdk.internal.vm.ci source structure issue
JVMCI_MODULESOURCEPATH := $(MODULESOURCEPATH) \
$(subst /$(MODULE)/,/*/, $(filter-out %processor/src, \
$(wildcard $(TOPDIR)/src/$(MODULE)/share/classes/*/src)))
MODULESOURCEPATH := $(call PathList, $(JVMCI_MODULESOURCEPATH))
endif
ifeq ($(MODULE), jdk.internal.vm.compiler)
## WORKAROUND jdk.internal.vm.compiler source structure issue
VM_COMPILER_MODULESOURCEPATH := $(MODULESOURCEPATH) \
$(subst /$(MODULE)/,/*/, $(filter-out %processor/src %test/src %jtt/src %bench/src %microbenchmarks/src, \
$(wildcard $(TOPDIR)/src/$(MODULE)/share/classes/*/src)))
MODULESOURCEPATH := $(call PathList, $(VM_COMPILER_MODULESOURCEPATH))
endif
ifeq ($(MODULE), jdk.aot)
## WORKAROUND jdk.aot source structure issue
AOT_MODULESOURCEPATH := $(MODULESOURCEPATH) \
$(subst /$(MODULE)/,/*/, $(filter-out %processor/src, \
$(wildcard $(TOPDIR)/src/$(MODULE)/share/classes/*/src)))
MODULESOURCEPATH := $(call PathList, $(AOT_MODULESOURCEPATH))
endif
$(eval $(call SetupJavaCompilation, $(MODULE), \
SMALL_JAVA := false, \
MODULE := $(MODULE), \
SRC := $(wildcard $(MODULE_SRC_DIRS)), \
INCLUDES := $(JDK_USER_DEFINED_FILTER), \
FAIL_NO_SRC := $(FAIL_NO_SRC), \
BIN := $(if $($(MODULE)_BIN), $($(MODULE)_BIN), $(JDK_OUTPUTDIR)/modules), \
HEADERS := $(SUPPORT_OUTPUTDIR)/headers, \
CREATE_API_DIGEST := true, \
JAVAC_FLAGS := \
$($(MODULE)_DOCLINT) \
$($(MODULE)_JAVAC_FLAGS) \
--module-source-path $(MODULESOURCEPATH) \
--module-path $(MODULEPATH) \
--system none, \
))
TARGETS += $($(MODULE)) $($(MODULE)_COPY_EXTRA)
# Declare dependencies between java compilations of different modules.
# Since the other modules are declared in different invocations of this file,
# use the macro to find the correct target file to depend on.
# Only the javac compilation actually depends on other modules so limit
# dependency declaration to that by using the *_COMPILE_TARGET variable.
$($(MODULE)_COMPILE_TARGET): $(foreach d, $(call FindDepsForModule, $(MODULE)), \
$(call SetupJavaCompilationApiTarget, $d, \
$(if $($d_BIN), $($d_BIN), $(JDK_OUTPUTDIR)/modules/$d)))
################################################################################
# Copy zh_HK properties files from zh_TW (needed by some modules)
# Copy zh_HK properties files from zh_TW
$(JDK_OUTPUTDIR)/modules/%_zh_HK.properties: $(JDK_OUTPUTDIR)/modules/%_zh_TW.properties
$(install-file)
@@ -83,48 +636,13 @@ CreateHkTargets = \
.properties \
)
################################################################################
# Include module specific build settings
ifeq ($(MODULE), java.sql.rowset)
TARGETS += $(call CreateHkTargets, $(java.sql.rowset_CLEAN_FILES))
endif
-include Java.gmk
################################################################################
# Setup the main compilation
$(eval $(call SetupJavaCompilation, $(MODULE), \
SMALL_JAVA := false, \
MODULE := $(MODULE), \
SRC := $(wildcard $(MODULE_SRC_DIRS)), \
INCLUDES := $(JDK_USER_DEFINED_FILTER), \
FAIL_NO_SRC := $(FAIL_NO_SRC), \
BIN := $(if $($(MODULE)_BIN), $($(MODULE)_BIN), $(JDK_OUTPUTDIR)/modules), \
HEADERS := $(SUPPORT_OUTPUTDIR)/headers, \
CREATE_API_DIGEST := true, \
CLEAN := $(CLEAN), \
CLEAN_FILES := $(CLEAN_FILES), \
COPY := $(COPY), \
DISABLED_WARNINGS := $(DISABLED_WARNINGS_java), \
EXCLUDES := $(EXCLUDES), \
EXCLUDE_FILES := $(EXCLUDE_FILES), \
KEEP_ALL_TRANSLATIONS := $(KEEP_ALL_TRANSLATIONS), \
JAVAC_FLAGS := \
$(DOCLINT) \
$(JAVAC_FLAGS) \
--module-source-path $(MODULESOURCEPATH) \
--module-path $(MODULEPATH) \
--system none, \
))
TARGETS += $($(MODULE))
# Declare dependencies between java compilations of different modules.
# Since the other modules are declared in different invocations of this file,
# use the macro to find the correct target file to depend on.
# Only the javac compilation actually depends on other modules so limit
# dependency declaration to that by using the *_COMPILE_TARGET variable.
$($(MODULE)_COMPILE_TARGET): $(foreach d, $(call FindDepsForModule, $(MODULE)), \
$(call SetupJavaCompilationApiTarget, $d, \
$(if $($d_BIN), $($d_BIN), $(JDK_OUTPUTDIR)/modules/$d)))
ifeq ($(MODULE), java.rmi)
TARGETS += $(call CreateHkTargets, $(java.rmi_CLEAN_FILES))
endif
################################################################################
# If this is an imported module, copy the pre built classes and resources into
@@ -148,6 +666,10 @@ endif
################################################################################
$(eval $(call IncludeCustomExtension, CompileJavaModules-post.gmk))
################################################################################
all: $(TARGETS)
.PHONY: all

View File

@@ -33,20 +33,8 @@ include JavaCompilation.gmk
TOOLS_CLASSES_DIR := $(BUILDTOOLS_OUTPUTDIR)/tools_jigsaw_classes
# When using an external BUILDJDK, make it possible to shortcut building of
# these tools using the BUILD_JAVAC instead of having to build the complete
# exploded image first.
ifeq ($(EXTERNAL_BUILDJDK), true)
COMPILER := buildjdk
TARGET_RELEASE := $(TARGET_RELEASE_NEWJDK)
else
COMPILER := interim
TARGET_RELEASE := $(TARGET_RELEASE_NEWJDK_UPGRADED)
endif
$(eval $(call SetupJavaCompilation, BUILD_JIGSAW_TOOLS, \
TARGET_RELEASE := $(TARGET_RELEASE), \
COMPILER := $(COMPILER), \
TARGET_RELEASE := $(TARGET_RELEASE_NEWJDK_UPGRADED), \
SRC := $(TOPDIR)/make/jdk/src/classes, \
INCLUDES := build/tools/deps \
build/tools/docs \

View File

@@ -52,6 +52,94 @@ $(eval $(call SetupJavaCompilation, BUILD_TOOLS_HOTSPOT, \
TARGETS += $(BUILD_TOOLS_HOTSPOT)
################################################################################
# Graal build tools
ifeq ($(INCLUDE_GRAAL), true)
VM_CI_SRC_DIR := $(TOPDIR)/src/jdk.internal.vm.ci/share/classes
SRC_DIR := $(TOPDIR)/src/jdk.internal.vm.compiler/share/classes
##############################################################################
# Compile the annotation processors
$(eval $(call SetupJavaCompilation, BUILD_VM_COMPILER_MATCH_PROCESSOR, \
TARGET_RELEASE := $(TARGET_RELEASE_BOOTJDK), \
SRC := \
$(SRC_DIR)/org.graalvm.compiler.processor/src \
$(SRC_DIR)/org.graalvm.compiler.core.match.processor/src \
, \
EXCLUDE_FILES := $(EXCLUDE_FILES), \
BIN := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.match.processor, \
JAR := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.match.processor.jar, \
DISABLED_WARNINGS := options, \
))
TARGETS += $(BUILD_VM_COMPILER_MATCH_PROCESSOR)
##############################################################################
$(eval $(call SetupJavaCompilation, BUILD_VM_COMPILER_NODEINFO_PROCESSOR, \
TARGET_RELEASE := $(TARGET_RELEASE_BOOTJDK), \
SRC := \
$(SRC_DIR)/org.graalvm.compiler.processor/src \
$(SRC_DIR)/org.graalvm.compiler.nodeinfo.processor/src \
, \
BIN := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.nodeinfo.processor, \
JAR := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.nodeinfo.processor.jar, \
DISABLED_WARNINGS := options, \
))
TARGETS += $(BUILD_VM_COMPILER_NODEINFO_PROCESSOR)
##############################################################################
$(eval $(call SetupJavaCompilation, BUILD_VM_COMPILER_OPTIONS_PROCESSOR, \
TARGET_RELEASE := $(TARGET_RELEASE_BOOTJDK), \
DISABLED_WARNINGS := options, \
SRC := \
$(SRC_DIR)/org.graalvm.compiler.processor/src \
$(SRC_DIR)/org.graalvm.compiler.options.processor/src \
, \
BIN := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.options.processor, \
JAR := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.options.processor.jar, \
))
TARGETS += $(BUILD_VM_COMPILER_OPTIONS_PROCESSOR)
##############################################################################
$(eval $(call SetupJavaCompilation, BUILD_VM_COMPILER_REPLACEMENTS_PROCESSOR, \
TARGET_RELEASE := $(TARGET_RELEASE_BOOTJDK), \
SRC := \
$(SRC_DIR)/org.graalvm.compiler.processor/src \
$(SRC_DIR)/org.graalvm.compiler.replacements.processor/src \
, \
EXCLUDE_FILES := $(EXCLUDE_FILES), \
BIN := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.replacements.verifier, \
JAR := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.replacements.verifier.jar, \
DISABLED_WARNINGS := options, \
))
TARGETS += $(BUILD_VM_COMPILER_REPLACEMENTS_PROCESSOR)
##############################################################################
$(eval $(call SetupJavaCompilation, BUILD_VM_COMPILER_SERVICEPROVIDER_PROCESSOR, \
TARGET_RELEASE := $(TARGET_RELEASE_BOOTJDK), \
SRC := \
$(SRC_DIR)/org.graalvm.compiler.processor/src \
$(SRC_DIR)/org.graalvm.compiler.serviceprovider.processor/src \
, \
EXCLUDE_FILES := $(EXCLUDE_FILES), \
BIN := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.serviceprovider.processor, \
JAR := $(BUILDTOOLS_OUTPUTDIR)/jdk.vm.compiler.serviceprovider.processor.jar, \
DISABLED_WARNINGS := options, \
))
TARGETS += $(BUILD_VM_COMPILER_SERVICEPROVIDER_PROCESSOR)
##############################################################################
endif
all: $(TARGETS)
.PHONY: all

View File

@@ -56,8 +56,7 @@ $(eval $(call SetupJavaCompilation, BUILD_TOOLS_JDK, \
DISABLED_WARNINGS := options, \
JAVAC_FLAGS := \
--add-exports java.desktop/sun.awt=ALL-UNNAMED \
--add-exports java.base/sun.text=ALL-UNNAMED \
--add-exports java.base/sun.security.util=ALL-UNNAMED, \
--add-exports java.base/sun.text=ALL-UNNAMED, \
))
TARGETS += $(BUILD_TOOLS_JDK)

View File

@@ -1,4 +1,4 @@
# Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -57,11 +57,14 @@ $(eval $(call IncludeCustomExtension, Docs.gmk))
################################################################################
# Javadoc settings
# Include configuration for URLs in generated javadoc
include $(TOPDIR)/make/conf/javadoc.conf
MODULES_SOURCE_PATH := $(call PathList, $(call GetModuleSrcPath) )
# URLs
JAVADOC_BASE_URL := https://docs.oracle.com/pls/topic/lookup?ctx=javase$(VERSION_NUMBER)&amp;id=homepage
BUG_SUBMIT_URL := https://bugreport.java.com/bugreport/
COPYRIGHT_URL := legal/copyright.html
LICENSE_URL := https://www.oracle.com/java/javase/terms/license/java$(VERSION_NUMBER)speclicense.html
REDISTRIBUTION_URL := https://www.oracle.com/technetwork/java/redist-137594.html
# In order to get a specific ordering it's necessary to specify the total
# ordering of tags as the tags are otherwise ordered in order of definition.
@@ -89,6 +92,7 @@ JAVADOC_TAGS := \
-tag see \
-taglet build.tools.taglet.ExtLink \
-taglet build.tools.taglet.Incubating \
-taglet build.tools.taglet.Preview \
-tagletpath $(BUILDTOOLS_OUTPUTDIR)/jdk_tools_classes \
$(CUSTOM_JAVADOC_TAGS) \
#
@@ -99,8 +103,7 @@ JAVADOC_TAGS := \
REFERENCE_TAGS := $(JAVADOC_TAGS)
# Which doclint checks to ignore
JAVADOC_DISABLED_DOCLINT_WARNINGS := missing
JAVADOC_DISABLED_DOCLINT_PACKAGES := org.w3c.* javax.smartcardio
JAVADOC_DISABLED_DOCLINT := accessibility html missing syntax reference
# The initial set of options for javadoc
JAVADOC_OPTIONS := -use -keywords -notimestamp \
@@ -154,14 +157,13 @@ COPYRIGHT_BOTTOM = \
<a href="$(REDISTRIBUTION_URL)">documentation redistribution policy</a>. \
$(DRAFT_MARKER_STR) <!-- Version $(VERSION_STRING) -->
# $1 - Optional "Other Versions" link
JAVADOC_BOTTOM = \
JAVADOC_BOTTOM := \
<a href="$(BUG_SUBMIT_URL)">Report a bug or suggest an enhancement</a><br> \
For further API reference and developer documentation see the \
<a href="$(JAVADOC_BASE_URL)" target="_blank">Java SE \
Documentation</a>, which contains more detailed, \
developer-targeted descriptions with conceptual overviews, definitions \
of terms, workarounds, and working code examples. $1<br> \
of terms, workarounds, and working code examples.<br> \
Java is a trademark or registered trademark of $(FULL_COMPANY_NAME) in \
the US and other countries.<br> \
$(call COPYRIGHT_BOTTOM, {@docroot}/../)
@@ -263,7 +265,6 @@ endef
# SHORT_NAME - The short name of this documentation collection
# LONG_NAME - The long name of this documentation collection
# TARGET_DIR - Where to store the output
# OTHER_VERSIONS - URL for other page listing versions
#
SetupApiDocsGeneration = $(NamedParamsMacroTemplate)
define SetupApiDocsGenerationBody
@@ -296,26 +297,18 @@ define SetupApiDocsGenerationBody
# Create a string like "-Xdoclint:all,-syntax,-html,..."
$1_OPTIONS += -Xdoclint:all,$$(call CommaList, $$(addprefix -, \
$$(JAVADOC_DISABLED_DOCLINT_WARNINGS)))
# Ignore the doclint warnings in certain packages
$1_OPTIONS += -Xdoclint/package:$$(call CommaList, $$(addprefix -, \
$$(JAVADOC_DISABLED_DOCLINT_PACKAGES)))
$$(JAVADOC_DISABLED_DOCLINT)))
$1_DOC_TITLE := $$($1_LONG_NAME)<br>Version $$(VERSION_SPECIFICATION) API \
Specification
$1_WINDOW_TITLE := $$(subst &amp;,&,$$($1_SHORT_NAME))$$(DRAFT_MARKER_TITLE)
$1_HEADER_TITLE := <div $$(HEADER_STYLE)><strong>$$($1_SHORT_NAME)</strong> \
$$(DRAFT_MARKER_STR)</div>
ifneq ($$($1_OTHER_VERSIONS), )
$1_JAVADOC_BOTTOM := $$(call JAVADOC_BOTTOM, <a href="$$($1_OTHER_VERSIONS)">Other versions.</a>)
else
$1_JAVADOC_BOTTOM := $$(call JAVADOC_BOTTOM, )
endif
$1_OPTIONS += -doctitle '$$($1_DOC_TITLE)'
$1_OPTIONS += -windowtitle '$$($1_WINDOW_TITLE)'
$1_OPTIONS += -header '$$($1_HEADER_TITLE)'
$1_OPTIONS += -bottom '$$($1_JAVADOC_BOTTOM)'
$1_OPTIONS += -bottom '$$(JAVADOC_BOTTOM)'
ifeq ($$(IS_DRAFT), true)
$1_OPTIONS += -top '$$(JAVADOC_TOP)'
endif
@@ -332,12 +325,6 @@ define SetupApiDocsGenerationBody
$$(eval $$(call create_overview_file,$1))
$1_OPTIONS += -overview $$($1_OVERVIEW)
# Add summary pages for new/deprecated APIs in recent releases
$1_OPTIONS += --since $(call CommaList, \
$(filter-out $(VERSION_DOCS_API_SINCE), \
$(call sequence, $(VERSION_DOCS_API_SINCE), $(VERSION_FEATURE))))
$1_OPTIONS += --since-label "New API since JDK $(VERSION_DOCS_API_SINCE)"
$$(foreach g, $$($1_GROUPS), \
$$(eval $1_OPTIONS += -group "$$($$g_GROUP_NAME)" "$$($$g_GROUP_MODULES)") \
)
@@ -453,7 +440,6 @@ $(eval $(call SetupApiDocsGeneration, JDK_API, \
SHORT_NAME := $(JDK_SHORT_NAME), \
LONG_NAME := $(JDK_LONG_NAME), \
TARGET_DIR := $(DOCS_OUTPUTDIR)/api, \
OTHER_VERSIONS := $(OTHER_JDK_VERSIONS_URL), \
))
# Targets generated are returned in JDK_API_JAVADOC_TARGETS and
@@ -470,7 +456,7 @@ $(eval $(call SetupApiDocsGeneration, JAVASE_API, \
MODULES := $(JAVASE_MODULES), \
SHORT_NAME := $(JAVASE_SHORT_NAME), \
LONG_NAME := $(JAVASE_LONG_NAME), \
TARGET_DIR := $(DOCS_JAVASE_IMAGE_DIR)/api, \
TARGET_DIR := $(IMAGES_OUTPUTDIR)/javase-docs/api, \
))
# Targets generated are returned in JAVASE_API_JAVADOC_TARGETS and
@@ -488,8 +474,8 @@ $(eval $(call SetupApiDocsGeneration, REFERENCE_API, \
MODULES := $(JAVASE_MODULES), \
SHORT_NAME := $(JAVASE_SHORT_NAME), \
LONG_NAME := $(JAVASE_LONG_NAME), \
TARGET_DIR := $(DOCS_REFERENCE_IMAGE_DIR)/api, \
JAVADOC_CMD := $(DOCS_REFERENCE_JAVADOC), \
TARGET_DIR := $(IMAGES_OUTPUTDIR)/reference-docs/api, \
JAVADOC_CMD := $(JAVADOC), \
OPTIONS := $(REFERENCE_OPTIONS), \
TAGS := $(REFERENCE_TAGS), \
))

View File

@@ -61,7 +61,7 @@ endif
# Save the stderr output of the command and print it along with stdout in case
# something goes wrong.
$(CLASSLIST_FILE): $(INTERIM_IMAGE_DIR)/bin/java$(EXECUTABLE_SUFFIX) $(CLASSLIST_JAR)
$(CLASSLIST_FILE): $(INTERIM_IMAGE_DIR)/bin/java$(EXE_SUFFIX) $(CLASSLIST_JAR)
$(call MakeDir, $(LINK_OPT_DIR))
$(call LogInfo, Generating $(patsubst $(OUTPUTDIR)/%, %, $@))
$(call LogInfo, Generating $(patsubst $(OUTPUTDIR)/%, %, $(JLI_TRACE_FILE)))
@@ -94,7 +94,7 @@ $(CLASSLIST_FILE): $(INTERIM_IMAGE_DIR)/bin/java$(EXECUTABLE_SUFFIX) $(CLASSLIST
# dependencies, make will correctly rebuild both jli trace and classlist
# incrementally using the single recipe above.
$(CLASSLIST_FILE): $(JLI_TRACE_FILE)
$(JLI_TRACE_FILE): $(INTERIM_IMAGE_DIR)/bin/java$(EXECUTABLE_SUFFIX) $(CLASSLIST_JAR)
$(JLI_TRACE_FILE): $(INTERIM_IMAGE_DIR)/bin/java$(EXE_SUFFIX) $(CLASSLIST_JAR)
TARGETS += $(CLASSLIST_FILE) $(JLI_TRACE_FILE)

View File

@@ -44,7 +44,7 @@ ALL_MODULES := $(call FindAllModules)
$(eval $(call ReadImportMetaData))
JRE_MODULES += $(filter $(ALL_MODULES), $(BOOT_MODULES) \
$(PLATFORM_MODULES) jdk.jdwp.agent)
$(PLATFORM_MODULES) $(JRE_TOOL_MODULES))
JDK_MODULES += $(ALL_MODULES)
JRE_MODULES_LIST := $(call CommaList, $(JRE_MODULES))
@@ -238,7 +238,6 @@ endif
ALL_JDK_MODULES := $(JDK_MODULES)
ALL_JRE_MODULES := $(sort $(JRE_MODULES), $(foreach m, $(JRE_MODULES), \
$(call FindTransitiveDepsForModule, $m)))
ALL_SYMBOLS_MODULES := $(JDK_MODULES)
ifeq ($(call isTargetOs, windows), true)
LIBS_TARGET_SUBDIR := bin
@@ -294,7 +293,6 @@ SetupCopyDebuginfo = \
# implementation above.
$(call SetupCopyDebuginfo,JDK)
$(call SetupCopyDebuginfo,JRE)
$(call SetupCopyDebuginfo,SYMBOLS)
################################################################################

View File

@@ -200,7 +200,7 @@ ifeq ($(HAS_SPEC),)
COMPARE_BUILD="$(COMPARE_BUILD):NODRYRUN=true" main && \
$(MAKE) $(MFLAGS) $(MAKE_LOG_FLAGS) -r -R -f $(topdir)/make/Init.gmk \
SPEC=$(spec) HAS_SPEC=true ACTUAL_TOPDIR=$(topdir) \
COMPARE_BUILD="$(COMPARE_BUILD):NODRYRUN=true" post-compare-build && \
COMPARE_BUILD="$(COMPARE_BUILD)" post-compare-build && \
) \
) true ) \
$(eval TARGET_DONE=true) \

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -32,7 +32,7 @@ include Modules.gmk
################################################################################
# Use this file inside the image as target for make rule
JIMAGE_TARGET_FILE := bin/java$(EXECUTABLE_SUFFIX)
JIMAGE_TARGET_FILE := bin/java$(EXE_SUFFIX)
INTERIM_MODULES_LIST := $(call CommaList, $(INTERIM_IMAGE_MODULES))

View File

@@ -38,8 +38,11 @@ ifeq ($(call isTargetOs, macosx), true)
MACOSX_PLIST_SRC := $(TOPDIR)/make/data/bundle
BUNDLE_ID := $(MACOSX_BUNDLE_ID_BASE).$(VERSION_SHORT)
BUNDLE_NAME := $(MACOSX_BUNDLE_NAME_BASE) $(VERSION_SHORT)
BUNDLE_INFO := $(MACOSX_BUNDLE_NAME_BASE) $(VERSION_STRING)
BUNDLE_PLATFORM_VERSION := $(VERSION_FEATURE).$(VERSION_INTERIM)
BUNDLE_VERSION := $(VERSION_NUMBER)
ifeq ($(COMPANY_NAME), N/A)
BUNDLE_VENDOR := UNDEFINED
else
@@ -72,26 +75,24 @@ ifeq ($(call isTargetOs, macosx), true)
SOURCE_FILES := $(MACOSX_PLIST_SRC)/JDK-Info.plist, \
OUTPUT_FILE := $(JDK_MACOSX_CONTENTS_DIR)/Info.plist, \
REPLACEMENTS := \
@@ID@@ => $(MACOSX_BUNDLE_ID_BASE).jdk ; \
@@ID@@ => $(BUNDLE_ID).jdk ; \
@@NAME@@ => $(BUNDLE_NAME) ; \
@@INFO@@ => $(BUNDLE_INFO) ; \
@@VERSION@@ => $(VERSION_NUMBER) ; \
@@BUILD_VERSION@@ => $(MACOSX_BUNDLE_BUILD_VERSION) ; \
@@VENDOR@@ => $(BUNDLE_VENDOR) ; \
@@MACOSX_VERSION_MIN@@ => $(MACOSX_VERSION_MIN) , \
@@PLATFORM_VERSION@@ => $(BUNDLE_PLATFORM_VERSION) ; \
@@VERSION@@ => $(BUNDLE_VERSION) ; \
@@VENDOR@@ => $(BUNDLE_VENDOR) , \
))
$(eval $(call SetupTextFileProcessing, BUILD_JRE_PLIST, \
SOURCE_FILES := $(MACOSX_PLIST_SRC)/JRE-Info.plist, \
OUTPUT_FILE := $(JRE_MACOSX_CONTENTS_DIR)/Info.plist, \
REPLACEMENTS := \
@@ID@@ => $(MACOSX_BUNDLE_ID_BASE).jre ; \
@@ID@@ => $(BUNDLE_ID).jre ; \
@@NAME@@ => $(BUNDLE_NAME) ; \
@@INFO@@ => $(BUNDLE_INFO) ; \
@@VERSION@@ => $(VERSION_NUMBER) ; \
@@BUILD_VERSION@@ => $(BUNDLE_BUILD_VERSION) ; \
@@VENDOR@@ => $(BUNDLE_VENDOR) ; \
@@MACOSX_VERSION_MIN@@ => $(MACOSX_VERSION_MIN) , \
@@PLATFORM_VERSION@@ => $(BUNDLE_PLATFORM_VERSION) ; \
@@VERSION@@ => $(BUNDLE_VERSION) ; \
@@VENDOR@@ => $(BUNDLE_VENDOR) , \
))
$(SUPPORT_OUTPUTDIR)/images/_jdk_bundle_attribute_set: $(COPY_JDK_IMAGE)

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -90,10 +90,12 @@ $(eval $(call SetupTarget, buildtools-jdk, \
$(eval $(call SetupTarget, buildtools-modules, \
MAKEFILE := CompileModuleTools, \
DEPS := exploded-image-base, \
))
$(eval $(call SetupTarget, buildtools-hotspot, \
MAKEFILE := CompileToolsHotspot, \
DEPS := interim-langtools, \
))
################################################################################
@@ -110,6 +112,7 @@ $(eval $(call SetupTarget, generate-exported-symbols, \
$(eval $(call DeclareRecipesForPhase, GENSRC, \
TARGET_SUFFIX := gensrc-src, \
FILE_PREFIX := Gensrc, \
MAKE_SUBDIR := gensrc, \
CHECK_MODULES := $(ALL_MODULES), \
))
@@ -147,6 +150,7 @@ ALL_TARGETS += $(GENSRC_TARGETS)
$(eval $(call DeclareRecipesForPhase, GENDATA, \
TARGET_SUFFIX := gendata, \
FILE_PREFIX := Gendata, \
MAKE_SUBDIR := gendata, \
CHECK_MODULES := $(ALL_MODULES), \
))
@@ -157,6 +161,7 @@ ALL_TARGETS += $(GENDATA_TARGETS)
$(eval $(call DeclareRecipesForPhase, COPY, \
TARGET_SUFFIX := copy, \
FILE_PREFIX := Copy, \
MAKE_SUBDIR := copy, \
CHECK_MODULES := $(ALL_MODULES), \
))
@@ -186,7 +191,6 @@ JAVA_TARGETS := $(addsuffix -java, $(JAVA_MODULES))
define DeclareCompileJavaRecipe
$1-java:
+($(CD) $(TOPDIR)/make && $(MAKE) $(MAKE_ARGS) \
$(patsubst %,-I%/modules/$1,$(PHASE_MAKEDIRS)) \
-f CompileJavaModules.gmk MODULE=$1)
endef
@@ -199,6 +203,7 @@ ALL_TARGETS += $(JAVA_TARGETS)
$(eval $(call DeclareRecipesForPhase, LIBS, \
TARGET_SUFFIX := libs, \
FILE_PREFIX := Lib, \
MAKE_SUBDIR := lib, \
CHECK_MODULES := $(ALL_MODULES), \
))
@@ -211,6 +216,7 @@ ALL_TARGETS += $(LIBS_TARGETS)
$(eval $(call DeclareRecipesForPhase, STATIC_LIBS, \
TARGET_SUFFIX := static-libs, \
FILE_PREFIX := Lib, \
MAKE_SUBDIR := lib, \
CHECK_MODULES := $(ALL_MODULES), \
EXTRA_ARGS := STATIC_LIBS=true, \
))
@@ -222,6 +228,7 @@ ALL_TARGETS += $(STATIC_LIBS_TARGETS)
$(eval $(call DeclareRecipesForPhase, LAUNCHER, \
TARGET_SUFFIX := launchers, \
FILE_PREFIX := Launcher, \
MAKE_SUBDIR := launcher, \
CHECK_MODULES := $(ALL_MODULES), \
))
@@ -338,7 +345,7 @@ $(eval $(call SetupTarget, test-image-demos-jdk, \
$(eval $(call SetupTarget, generate-summary, \
MAKEFILE := GenerateModuleSummary, \
DEPS := jmods buildtools-modules runnable-buildjdk, \
DEPS := jmods buildtools-modules, \
))
################################################################################
@@ -468,7 +475,7 @@ $(eval $(call SetupTarget, docs-jdk-api-javadoc, \
$(eval $(call SetupTarget, docs-jdk-api-modulegraph, \
MAKEFILE := Docs, \
TARGET := docs-jdk-api-modulegraph, \
DEPS := buildtools-modules runnable-buildjdk, \
DEPS := exploded-image buildtools-modules, \
))
$(eval $(call SetupTarget, docs-javase-api-javadoc, \
@@ -479,7 +486,7 @@ $(eval $(call SetupTarget, docs-javase-api-javadoc, \
$(eval $(call SetupTarget, docs-javase-api-modulegraph, \
MAKEFILE := Docs, \
TARGET := docs-javase-api-modulegraph, \
DEPS := buildtools-modules runnable-buildjdk, \
DEPS := exploded-image buildtools-modules, \
))
$(eval $(call SetupTarget, docs-reference-api-javadoc, \
@@ -490,7 +497,7 @@ $(eval $(call SetupTarget, docs-reference-api-javadoc, \
$(eval $(call SetupTarget, docs-reference-api-modulegraph, \
MAKEFILE := Docs, \
TARGET := docs-reference-api-modulegraph, \
DEPS := buildtools-modules runnable-buildjdk, \
DEPS := exploded-image buildtools-modules, \
))
# The gensrc steps for jdk.jdi create html spec files.
@@ -659,7 +666,19 @@ $(eval $(call SetupTarget, test-image-libtest-jtreg-native, \
DEPS := build-test-libtest-jtreg-native, \
))
ifneq ($(GTEST_FRAMEWORK_SRC), )
$(eval $(call SetupTarget, build-test-hotspot-jtreg-graal, \
MAKEFILE := test/JtregGraalUnit, \
TARGET := build-test-hotspot-jtreg-graal, \
DEPS := exploded-image, \
))
$(eval $(call SetupTarget, test-image-hotspot-jtreg-graal, \
MAKEFILE := test/JtregGraalUnit, \
TARGET := test-image-hotspot-jtreg-graal, \
DEPS := build-test-hotspot-jtreg-graal, \
))
ifneq ($(GTEST_FRAMEWORK_SRC), )
$(eval $(call SetupTarget, test-image-hotspot-gtest, \
MAKEFILE := hotspot/test/GtestImage, \
DEPS := hotspot, \
@@ -736,24 +755,12 @@ $(eval $(call SetupTarget, test-bundles, \
DEPS := test-image, \
))
$(eval $(call SetupTarget, docs-jdk-bundles, \
$(eval $(call SetupTarget, docs-bundles, \
MAKEFILE := Bundles, \
TARGET := docs-jdk-bundles, \
TARGET := docs-bundles, \
DEPS := docs-image, \
))
$(eval $(call SetupTarget, docs-javase-bundles, \
MAKEFILE := Bundles, \
TARGET := docs-javase-bundles, \
DEPS := docs-javase-image, \
))
$(eval $(call SetupTarget, docs-reference-bundles, \
MAKEFILE := Bundles, \
TARGET := docs-reference-bundles, \
DEPS := docs-reference-image, \
))
$(eval $(call SetupTarget, static-libs-bundles, \
MAKEFILE := Bundles, \
TARGET := static-libs-bundles, \
@@ -862,6 +869,18 @@ else
# copied and processed.
java.desktop-gensrc-src: java.base-gensrc java.base-copy
# The annotation processing for jdk.internal.vm.compiler
# and jdk.internal.vm.compiler.management needs classes from the current JDK.
jdk.internal.vm.compiler-gensrc-src: $(addsuffix -java, \
$(call FindTransitiveDepsForModule, jdk.internal.vm.compiler))
jdk.internal.vm.compiler.management-gensrc-src: $(addsuffix -java, \
$(call FindTransitiveDepsForModule, jdk.internal.vm.compiler.management))
# For these modules, the gensrc step is generating a module-info.java.extra
# file to be processed by the gensrc-moduleinfo target.
jdk.internal.vm.compiler-gensrc-moduleinfo: jdk.internal.vm.compiler-gensrc-src
jdk.internal.vm.compiler.management-gensrc-moduleinfo: jdk.internal.vm.compiler.management-gensrc-src
jdk.jdeps-gendata: java
# The ct.sym generation uses all the moduleinfos as input
@@ -932,13 +951,10 @@ else
$(JMOD_TARGETS) $(INTERIM_JMOD_TARGETS): java.base-libs java.base-copy \
java.base-gendata jdk.jlink-launchers java
endif
else ifeq ($(EXTERNAL_BUILDJDK), false)
# The normal non cross compilation usecase needs to wait for the full
else
# The normal non cross compilation case uses needs to wait for the full
# exploded-image to avoid a race with the optimize target.
$(JMOD_TARGETS) $(INTERIM_JMOD_TARGETS): exploded-image
# The buildtools-modules are used for the exploded-image-optimize target,
# but can be built either using the exploded-image or an external BUILDJDK.
buildtools-modules: exploded-image-base
endif
# All modules include the main license files from java.base.
@@ -1059,18 +1075,6 @@ ifneq ($(COMPILE_TYPE), cross)
exploded-image: exploded-image-optimize
endif
# The runnable-buildjdk target guarantees that the buildjdk is done
# building and ready to be used. The exact set of dependencies it needs
# depends on what kind of buildjdk is used for the current configuration.
runnable-buildjdk:
ifeq ($(CREATE_BUILDJDK), true)
ifneq ($(CREATING_BUILDJDK), true)
runnable-buildjdk: create-buildjdk
endif
else ifeq ($(EXTERNAL_BUILDJDK), false)
runnable-buildjdk: exploded-image
endif
create-buildjdk: create-buildjdk-interim-image
docs-jdk-api: docs-jdk-api-javadoc
@@ -1124,16 +1128,8 @@ ifeq ($(call isTargetOs, macosx), true)
legacy-images: mac-legacy-jre-bundle
endif
# These targets build the various documentation images
docs-jdk-image: docs-jdk
docs-javase-image: docs-javase
docs-reference-image: docs-reference
# The docs-jdk-image is what most users expect to be built
docs-image: docs-jdk-image
all-docs-images: docs-jdk-image docs-javase-image docs-reference-image
docs-bundles: docs-jdk-bundles
all-docs-bundles: docs-jdk-bundles docs-javase-bundles docs-reference-bundles
# This target builds the documentation image
docs-image: docs-jdk
# This target builds the test image
test-image: prepare-test-image test-image-jdk-jtreg-native \
@@ -1149,6 +1145,10 @@ else
ifneq ($(GTEST_FRAMEWORK_SRC), )
test-image: test-image-hotspot-gtest
endif
ifeq ($(INCLUDE_GRAAL), true)
test-image: test-image-hotspot-jtreg-graal
endif
endif
ifeq ($(BUILD_FAILURE_HANDLER), true)
@@ -1162,7 +1162,7 @@ endif
################################################################################
# all-images builds all our deliverables as images.
all-images: product-images test-image all-docs-images
all-images: product-images test-image docs-image
# all-bundles packages all our deliverables as tar.gz bundles.
all-bundles: product-bundles test-bundles docs-bundles static-libs-bundles
@@ -1170,11 +1170,10 @@ all-bundles: product-bundles test-bundles docs-bundles static-libs-bundles
ALL_TARGETS += buildtools hotspot hotspot-libs hotspot-gensrc gensrc gendata \
copy java libs static-libs launchers jmods \
jdk.jdwp.agent-gensrc $(ALL_MODULES) demos \
exploded-image-base exploded-image runnable-buildjdk \
exploded-image-base exploded-image \
create-buildjdk docs-jdk-api docs-javase-api docs-reference-api docs-jdk \
docs-javase docs-reference docs-javadoc mac-bundles product-images legacy-images \
docs-image docs-javase-image docs-reference-image all-docs-images \
docs-bundles all-docs-bundles test-image all-images \
docs-image test-image all-images \
all-bundles
################################################################################

View File

@@ -150,7 +150,9 @@ define DeclareRecipeForModuleMakefile
$2-$$($1_TARGET_SUFFIX):
+($(CD) $(TOPDIR)/make && $(MAKE) $(MAKE_ARGS) \
-f ModuleWrapper.gmk -I $$(TOPDIR)/make/common/modules \
$$(patsubst %,-I%/modules/$2,$$(PHASE_MAKEDIRS)) \
$$(addprefix -I, $$(PHASE_MAKEDIRS) \
$$(addsuffix /modules/$2, $$(PHASE_MAKEDIRS)) \
) \
MODULE=$2 MAKEFILE_PREFIX=$$($1_FILE_PREFIX) $$($1_EXTRA_ARGS))
endef
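
The two -I list constructions in this hunk expand differently. A minimal standalone sketch of the difference, using made-up values for PHASE_MAKEDIRS and the module name:

# sketch.mk -- compare the two include-path constructions shown above.
# PHASE_MAKEDIRS and MODULE are hypothetical example values.
PHASE_MAKEDIRS := make make/test
MODULE := java.base

# Variant 1: only the per-module subdirectories are put on the include path.
INCLUDES_1 := $(patsubst %,-I%/modules/$(MODULE),$(PHASE_MAKEDIRS))

# Variant 2: the phase makedirs themselves are added as well.
INCLUDES_2 := $(addprefix -I, $(PHASE_MAKEDIRS) \
    $(addsuffix /modules/$(MODULE), $(PHASE_MAKEDIRS)))

$(info 1: $(INCLUDES_1))
$(info 2: $(INCLUDES_2))

all:

With those values, variant 1 prints -Imake/modules/java.base -Imake/test/modules/java.base, while variant 2 additionally includes -Imake and -Imake/test.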
@@ -183,6 +185,7 @@ endef
# Param 1: Name of list to add targets to
# Named params:
# TARGET_SUFFIX : Suffix of target to create for recipe
# MAKE_SUBDIR : Subdir for this build phase
# FILE_PREFIX : File prefix for this build phase
# CHECK_MODULES : List of modules to try
# MULTIPLE_MAKEFILES : Set to true to handle makefiles for the same module and

View File

@@ -39,7 +39,7 @@ include MakeBase.gmk
TARGETS :=
# Include the file being wrapped.
include $(MAKEFILE_PREFIX).gmk
include modules/$(MODULE)/$(MAKEFILE_PREFIX).gmk
# Setup copy rules from the modules directories to the jdk image directory.
ifeq ($(call isTargetOs, windows), true)

View File

@@ -53,7 +53,6 @@ define create-info-file
$(call info-file-item, "JAVA_VERSION_DATE", "$(VERSION_DATE)")
$(call info-file-item, "OS_NAME", "$(RELEASE_FILE_OS_NAME)")
$(call info-file-item, "OS_ARCH", "$(RELEASE_FILE_OS_ARCH)")
$(call info-file-item, "LIBC", "$(RELEASE_FILE_LIBC)")
endef
# Param 1 - The file containing the MODULES list

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -46,7 +46,7 @@ endif
$(eval $(call ParseKeywordVariable, TEST_OPTS, \
SINGLE_KEYWORDS := JOBS TIMEOUT_FACTOR JCOV JCOV_DIFF_CHANGESET, \
STRING_KEYWORDS := VM_OPTIONS JAVA_OPTIONS, \
STRING_KEYWORDS := VM_OPTIONS JAVA_OPTIONS AOT_MODULES, \
))
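
ParseKeywordVariable splits a single variable of KEY=value pairs (typically separated by semicolons on the make command line) into per-keyword variables such as TEST_OPTS_JOBS. A simplified standalone sketch of the idea, not the real implementation, and ignoring STRING_KEYWORDS whose values contain spaces:

# keywords.mk -- simplified keyword splitting (illustrative only).
TEST_OPTS := JOBS=4;TIMEOUT_FACTOR=2

$(foreach kv, $(subst ;, ,$(TEST_OPTS)), \
  $(eval TEST_OPTS_$(firstword $(subst =, ,$(kv))) := $(word 2,$(subst =, ,$(kv)))))

$(info JOBS is $(TEST_OPTS_JOBS) and TIMEOUT_FACTOR is $(TEST_OPTS_TIMEOUT_FACTOR))

all: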
# Helper function to propagate TEST_OPTS values.
@@ -60,14 +60,19 @@ define SetTestOpt
endif
endef
# Setup _NT_SYMBOL_PATH on Windows, which points to our pdb files.
# Setup _NT_SYMBOL_PATH on Windows
ifeq ($(call isTargetOs, windows), true)
ifndef _NT_SYMBOL_PATH
SYMBOL_PATH := $(call PathList, $(sort $(patsubst %/, %, $(dir $(wildcard \
$(addprefix $(SYMBOLS_IMAGE_DIR)/bin/, *.pdb */*.pdb))))))
export _NT_SYMBOL_PATH := $(subst \\,\, $(call FixPath, \
$(subst $(DQUOTE),, $(SYMBOL_PATH))))
$(call LogDebug, Setting _NT_SYMBOL_PATH to $(_NT_SYMBOL_PATH))
# Can't use PathList here as it adds quotes around the value.
_NT_SYMBOL_PATH := \
$(subst $(SPACE),;,$(strip \
$(foreach p, $(sort $(dir $(wildcard \
$(addprefix $(SYMBOLS_IMAGE_DIR)/bin/, *.pdb */*.pdb)))), \
$(call FixPath, $p) \
) \
))
export _NT_SYMBOL_PATH
$(call LogDebug, Rewriting _NT_SYMBOL_PATH to $(_NT_SYMBOL_PATH))
endif
endif
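
Both variants above turn the set of directories containing .pdb files into a semicolon-separated Windows search path. A minimal standalone sketch of the space-to-semicolon join, with made-up directories:

# sympath.mk -- join directories into a ;-separated search path.
# PDB_DIRS is a hypothetical value; the real makefile derives it from a
# wildcard over $(SYMBOLS_IMAGE_DIR)/bin.
PDB_DIRS := c:/symbols/bin c:/symbols/bin/server

# A literal space variable is needed as the "from" argument of $(subst).
EMPTY :=
SPACE := $(EMPTY) $(EMPTY)

NT_SYMBOL_PATH := $(subst $(SPACE),;,$(strip $(PDB_DIRS)))
$(info _NT_SYMBOL_PATH would be $(NT_SYMBOL_PATH))

all: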
@@ -134,6 +139,92 @@ ifeq ($(GCOV_ENABLED), true)
JTREG_COV_OPTIONS += -e:GCOV_PREFIX="$(GCOV_OUTPUT_DIR)"
endif
################################################################################
# Optionally create AOT libraries for specified modules before running tests.
# Note, this could not be done during JDK build time.
################################################################################
# Parameter 1 is the name of the rule.
#
# Remaining parameters are named arguments.
# MODULE The module to generate a library for
# BIN Output directory in which to put the library
# VM_OPTIONS List of JVM arguments to use when creating library
# OPTIONS_VAR Name of variable to put AOT java options in
# PREREQS_VAR Name of variable to put all AOT prerequisite rule targets in
# for test rules to depend on
#
SetupAotModule = $(NamedParamsMacroTemplate)
define SetupAotModuleBody
$1_AOT_LIB := $$($1_BIN)/$$(call SHARED_LIBRARY,$$($1_MODULE))
$1_AOT_CCLIST := $$(wildcard $$(TOPDIR)/test/hotspot/jtreg/compiler/aot/scripts/$$($1_MODULE)-list.txt)
# Create jaotc flags.
# VM flags which don't affect AOT code generation are filtered out:
# -Xcomp, -XX:+-TieredCompilation
$1_JAOTC_OPTS := \
-J-Xmx4g --info \
$$(addprefix -J, $$(filter-out -Xcomp %TieredCompilation, $$($1_VM_OPTIONS))) \
$$(addprefix --compile-commands$(SPACE), $$($1_AOT_CCLIST)) \
--linker-path $$(LD_JAOTC) \
#
ifneq ($$(filter -ea, $$($1_VM_OPTIONS)), )
$1_JAOTC_OPTS += --compile-with-assertions
endif
$$($1_AOT_LIB): $$(JDK_UNDER_TEST)/release \
$$(call DependOnVariable, $1_JAOTC_OPTS) \
$$(call DependOnVariable, JDK_UNDER_TEST)
$$(call LogWarn, Generating $$(patsubst $$(OUTPUTDIR)/%, %, $$@))
$$(call MakeTargetDir)
$$(call ExecuteWithLog, $$@, \
$$(COV_ENVIRONMENT) \
$$(FIXPATH) $$(JDK_UNDER_TEST)/bin/jaotc \
$$($1_JAOTC_OPTS) --output $$@ --module $$($1_MODULE) \
)
$$(call ExecuteWithLog, $$@.check, ( \
$$(FIXPATH) $$(JDK_UNDER_TEST)/bin/java \
$$($1_VM_OPTIONS) -XX:+UnlockDiagnosticVMOptions -XX:+UnlockExperimentalVMOptions \
-XX:+PrintAOT -XX:+UseAOTStrictLoading \
-XX:AOTLibrary=$$@ -version \
> $$@.verify-aot \
))
$1_AOT_OPTIONS += -XX:+UnlockExperimentalVMOptions
$1_AOT_OPTIONS += -XX:AOTLibrary=$$($1_AOT_LIB)
$1_AOT_TARGETS += $$($1_AOT_LIB)
endef
################################################################################
# Optionally create AOT libraries before running tests.
# Note, this could not be done during JDK build time.
################################################################################
# Parameter 1 is the name of the rule.
#
# Remaining parameters are named arguments.
# MODULES The modules to generate a library for
# VM_OPTIONS List of JVM arguments to use when creating libraries
#
# After calling this, the following variables are defined
# $1_AOT_OPTIONS List of all java options needed to use the AOT libraries
# $1_AOT_TARGETS List of all targets that the test rule will need to depend on
#
SetupAot = $(NamedParamsMacroTemplate)
define SetupAotBody
$$(info Running with AOTd libraries for $$($1_MODULES))
# Put aot libraries in a separate directory so they are not deleted between
# test runs and may be reused between make invocations.
$$(foreach m, $$($1_MODULES), \
$$(eval $$(call SetupAotModule, $1_$$m, \
MODULE := $$m, \
BIN := $$(TEST_SUPPORT_DIR)/aot/$1, \
VM_OPTIONS := $$($1_VM_OPTIONS), \
)) \
$$(eval $1_AOT_OPTIONS += $$($1_$$m_AOT_OPTIONS)) \
$$(eval $1_AOT_TARGETS += $$($1_$$m_AOT_TARGETS)) \
)
endef
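
For illustration, a hedged sketch of how SetupAot is meant to be invoked; the rule name, module and options below are made up, and the real call sites appear further down in the gtest and jtreg setup (where $$ escaping is needed inside the enclosing define):

# Illustrative only; requires the SetupAot macro defined above.
$(eval $(call SetupAot, MY_TESTS, \
    MODULES := java.base, \
    VM_OPTIONS := -Xmx2g -ea, \
))

# Afterwards the caller can use:
#   $(MY_TESTS_AOT_OPTIONS)  -- java options that enable the generated AOT libraries
#   $(MY_TESTS_AOT_TARGETS)  -- targets the test rule should depend on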
################################################################################
# Setup global test running parameters
################################################################################
@@ -192,6 +283,7 @@ endif
$(eval $(call SetTestOpt,VM_OPTIONS,JTREG))
$(eval $(call SetTestOpt,JAVA_OPTIONS,JTREG))
$(eval $(call SetTestOpt,AOT_MODULES,JTREG))
$(eval $(call SetTestOpt,JOBS,JTREG))
$(eval $(call SetTestOpt,TIMEOUT_FACTOR,JTREG))
@@ -202,7 +294,7 @@ $(eval $(call ParseKeywordVariable, JTREG, \
TEST_MODE ASSERT VERBOSE RETAIN MAX_MEM RUN_PROBLEM_LISTS \
RETRY_COUNT MAX_OUTPUT, \
STRING_KEYWORDS := OPTIONS JAVA_OPTIONS VM_OPTIONS KEYWORDS \
EXTRA_PROBLEM_LISTS LAUNCHER_OPTIONS, \
EXTRA_PROBLEM_LISTS AOT_MODULES LAUNCHER_OPTIONS, \
))
ifneq ($(JTREG), )
@@ -214,10 +306,11 @@ endif
$(eval $(call SetTestOpt,VM_OPTIONS,GTEST))
$(eval $(call SetTestOpt,JAVA_OPTIONS,GTEST))
$(eval $(call SetTestOpt,AOT_MODULES,GTEST))
$(eval $(call ParseKeywordVariable, GTEST, \
SINGLE_KEYWORDS := REPEAT, \
STRING_KEYWORDS := OPTIONS VM_OPTIONS JAVA_OPTIONS, \
STRING_KEYWORDS := OPTIONS VM_OPTIONS JAVA_OPTIONS AOT_MODULES, \
))
ifneq ($(GTEST), )
@@ -258,6 +351,8 @@ langtools_JTREG_PROBLEM_LIST += $(TOPDIR)/test/langtools/ProblemList.txt
hotspot_JTREG_PROBLEM_LIST += $(TOPDIR)/test/hotspot/jtreg/ProblemList.txt
lib-test_JTREG_PROBLEM_LIST += $(TOPDIR)/test/lib-test/ProblemList.txt
langtools_JTREG_MAX_MEM := 768m
################################################################################
# Parse test selection
#
@@ -498,18 +593,23 @@ define SetupRunGtestTestBody
$1_GTEST_REPEAT :=--gtest_repeat=$$(GTEST_REPEAT)
endif
run-test-$1: pre-run-test
ifneq ($$(GTEST_AOT_MODULES), )
$$(eval $$(call SetupAot, $1, \
MODULES := $$(GTEST_AOT_MODULES), \
VM_OPTIONS := $$(GTEST_VM_OPTIONS) $$(GTEST_JAVA_OPTIONS), \
))
endif
run-test-$1: pre-run-test $$($1_AOT_TARGETS)
$$(call LogWarn)
$$(call LogWarn, Running test '$$($1_TEST)')
$$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR))
$$(call ExecuteWithLog, $$($1_TEST_SUPPORT_DIR)/gtest, ( \
$$(CD) $$($1_TEST_SUPPORT_DIR) && \
$$(FIXPATH) $$(TEST_IMAGE_DIR)/hotspot/gtest/$$($1_VARIANT)/gtestLauncher \
-jdk $(JDK_UNDER_TEST) $$($1_GTEST_FILTER) \
--gtest_output=xml:$$($1_TEST_RESULTS_DIR)/gtest.xml \
--gtest_catch_exceptions=0 \
$$($1_GTEST_REPEAT) $$(GTEST_OPTIONS) $$(GTEST_VM_OPTIONS) \
$$(GTEST_JAVA_OPTIONS) \
$$(GTEST_JAVA_OPTIONS) $$($1_AOT_OPTIONS) \
> >($(TEE) $$($1_TEST_RESULTS_DIR)/gtest.txt) \
&& $$(ECHO) $$$$? > $$($1_EXITCODE) \
|| $$(ECHO) $$$$? > $$($1_EXITCODE) \
@@ -720,7 +820,7 @@ define SetupRunJtregTestBody
# Convert JTREG_foo into $1_JTREG_foo with a suitable value.
$$(eval $$(call SetJtregValue,$1,JTREG_TEST_MODE,agentvm))
$$(eval $$(call SetJtregValue,$1,JTREG_ASSERT,true))
$$(eval $$(call SetJtregValue,$1,JTREG_MAX_MEM,768m))
$$(eval $$(call SetJtregValue,$1,JTREG_MAX_MEM,512m))
$$(eval $$(call SetJtregValue,$1,JTREG_NATIVEPATH))
$$(eval $$(call SetJtregValue,$1,JTREG_BASIC_OPTIONS))
$$(eval $$(call SetJtregValue,$1,JTREG_PROBLEM_LIST))
@@ -736,7 +836,7 @@ define SetupRunJtregTestBody
# Make sure MaxRAMPercentage is high enough to not cause OOM or swapping since
# we may end up with a lot of JVM's
$1_JTREG_MAX_RAM_PERCENTAGE := $$(shell $(AWK) 'BEGIN { print 25 / $$($1_JTREG_JOBS); }')
$1_JTREG_MAX_RAM_PERCENTAGE := $$(shell $$(EXPR) 25 / $$($1_JTREG_JOBS))
JTREG_TIMEOUT_FACTOR ?= 4
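
The difference between the two MAX_RAM_PERCENTAGE computations above is integer versus floating-point division. A small standalone sketch with an assumed job count of 8:

# ram.mk -- integer vs floating-point division for the RAM percentage.
# JOBS is a made-up example value.
JOBS := 8

PCT_EXPR := $(shell expr 25 / $(JOBS))
PCT_AWK := $(shell awk 'BEGIN { print 25 / $(JOBS); }')

$(info expr gives $(PCT_EXPR), awk gives $(PCT_AWK))

all:

With 8 jobs, expr yields 3 while awk yields 3.125; the floating-point form also avoids rounding down to 0 when more than 25 parallel JVMs are requested.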
@@ -816,6 +916,7 @@ define SetupRunJtregTestBody
endif
$1_JTREG_BASIC_OPTIONS += -e:TEST_IMAGE_DIR=$(TEST_IMAGE_DIR)
$1_JTREG_BASIC_OPTIONS += -e:TEST_IMAGE_GRAAL_DIR=$(TEST_IMAGE_DIR)/hotspot/jtreg/graal
ifneq ($$(JTREG_FAILURE_HANDLER_OPTIONS), )
$1_JTREG_LAUNCHER_OPTIONS += -Djava.library.path="$(JTREG_FAILURE_HANDLER_DIR)"
@@ -832,6 +933,17 @@ define SetupRunJtregTestBody
endif
endif
ifneq ($$(JTREG_AOT_MODULES), )
$$(eval $$(call SetupAot, $1, \
MODULES := $$(JTREG_AOT_MODULES), \
VM_OPTIONS := $$(JTREG_VM_OPTIONS) $$(JTREG_JAVA_OPTIONS), \
))
endif
ifneq ($$($1_AOT_OPTIONS), )
$1_JTREG_BASIC_OPTIONS += -vmoptions:"$$($1_AOT_OPTIONS)"
endif
clean-workdir-$1:
$$(RM) -r $$($1_TEST_SUPPORT_DIR)
@@ -866,7 +978,7 @@ define SetupRunJtregTestBody
done
endif
run-test-$1: pre-run-test clean-workdir-$1
run-test-$1: pre-run-test clean-workdir-$1 $$($1_AOT_TARGETS)
$$(call LogWarn)
$$(call LogWarn, Running test '$$($1_TEST)')
$$(call MakeDir, $$($1_TEST_RESULTS_DIR) $$($1_TEST_SUPPORT_DIR) \

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2017, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2017, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -189,11 +189,15 @@ ifeq ($(OPENJDK_TARGET_CPU), x86_64)
endif
ifeq ($(OPENJDK_TARGET_OS), windows)
FIXPATH_BASE := $(BASH) $(TOPDIR)/make/scripts/fixpath.sh
FIXPATH := $(FIXPATH_BASE) exec
ifeq ($(wildcard $(TEST_IMAGE_DIR)/bin/fixpath.exe), )
$(info Error: fixpath is missing from test image '$(TEST_IMAGE_DIR)')
$(error Cannot continue.)
endif
FIXPATH := $(TEST_IMAGE_DIR)/bin/fixpath.exe -c
PATH_SEP:=;
else
FIXPATH_BASE :=
FIXPATH :=
PATH_SEP:=:
endif
# Check number of cores and memory in MB
@@ -222,6 +226,25 @@ ifeq ($(MEMORY_SIZE), )
MEMORY_SIZE := 1024
endif
# Setup LD for AOT support
ifneq ($(DEVKIT_HOME), )
ifeq ($(OPENJDK_TARGET_OS), windows)
LD_JAOTC := $(DEVKIT_HOME)/VC/bin/x64/link.exe
LIBRARY_PREFIX :=
SHARED_LIBRARY_SUFFIX := .dll
else ifeq ($(OPENJDK_TARGET_OS), linux)
LD_JAOTC := $(DEVKIT_HOME)/bin/ld
LIBRARY_PREFIX := lib
SHARED_LIBRARY_SUFFIX := .so
else ifeq ($(OPENJDK_TARGET_OS), macosx)
LD_JAOTC := $(DEVKIT_HOME)/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld
LIBRARY_PREFIX := lib
SHARED_LIBRARY_SUFFIX := .dylib
endif
else
LD := ld
endif
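
These prefix and suffix values feed the SHARED_LIBRARY helper used earlier to name the per-module AOT library. A minimal sketch of that naming scheme, with a simplified stand-in for the helper (not its real definition) and values matching the linux branch above:

# libname.mk -- simplified stand-in for the SHARED_LIBRARY helper.
LIBRARY_PREFIX := lib
SHARED_LIBRARY_SUFFIX := .so
SHARED_LIBRARY = $(LIBRARY_PREFIX)$(1)$(SHARED_LIBRARY_SUFFIX)

$(info $(call SHARED_LIBRARY,java.base))

all:

With those values the AOT library for java.base would be named libjava.base.so.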
ifneq ($(wildcard $(JDK_IMAGE_DIR)/template.xml), )
TEST_OPTS_JCOV := true
JCOV_IMAGE_DIR := $(JDK_IMAGE_DIR)
@@ -256,8 +279,8 @@ $(call CreateNewSpec, $(NEW_SPEC), \
MAKE := $(MAKE), \
BASH := $(BASH), \
JIB_JAR := $(JIB_JAR), \
FIXPATH_BASE := $(FIXPATH_BASE), \
FIXPATH := $(FIXPATH), \
PATH_SEP := $(PATH_SEP), \
OPENJDK_TARGET_OS := $(OPENJDK_TARGET_OS), \
OPENJDK_TARGET_OS_TYPE := $(OPENJDK_TARGET_OS_TYPE), \
OPENJDK_TARGET_OS_ENV := $(OPENJDK_TARGET_OS_ENV), \
@@ -267,6 +290,9 @@ $(call CreateNewSpec, $(NEW_SPEC), \
OPENJDK_TARGET_CPU_ENDIAN := $(OPENJDK_TARGET_CPU_ENDIAN), \
NUM_CORES := $(NUM_CORES), \
MEMORY_SIZE := $(MEMORY_SIZE), \
LD_JAOTC := $(LD_JAOTC), \
LIBRARY_PREFIX := $(LIBRARY_PREFIX), \
SHARED_LIBRARY_SUFFIX := $(SHARED_LIBRARY_SUFFIX), \
include $(TOPDIR)/make/RunTestsPrebuiltSpec.gmk, \
TEST_OPTS_JCOV := $(TEST_OPTS_JCOV), \
$(CUSTOM_NEW_SPEC_LINE), \

View File

@@ -116,13 +116,16 @@ JAVAC_CMD := $(BOOT_JDK)/bin/javac
JAR_CMD := $(BOOT_JDK)/bin/jar
JLINK_CMD := $(JDK_OUTPUTDIR)/bin/jlink
JMOD_CMD := $(JDK_OUTPUTDIR)/bin/jmod
JARSIGNER_CMD := $(BOOT_JDK)/bin/jarsigner
JAVA := $(FIXPATH) $(JAVA_CMD) $(JAVA_FLAGS_BIG) $(JAVA_FLAGS)
JAVA_SMALL := $(FIXPATH) $(JAVA_CMD) $(JAVA_FLAGS_SMALL) $(JAVA_FLAGS)
JAVA_DETACH := $(FIXPATH) $(FIXPATH_DETACH_FLAG) $(JAVA_CMD) $(JAVA_FLAGS_BIG) $(JAVA_FLAGS)
JAVAC := $(FIXPATH) $(JAVAC_CMD)
JAR := $(FIXPATH) $(JAR_CMD)
JLINK := $(FIXPATH) $(JLINK_CMD)
JMOD := $(FIXPATH) $(JMOD_CMD)
JARSIGNER := $(FIXPATH) $(JARSIGNER_CMD)
BUILD_JAVA := $(JDK_IMAGE_DIR)/bin/JAVA
################################################################################
@@ -150,6 +153,7 @@ LN := ln
MIG := mig
MKDIR := mkdir
MV := mv
NAWK := nawk
NICE := nice
PATCH := patch
PRINTF := printf
@@ -163,6 +167,7 @@ TAIL := tail
TEE := tee
TR := tr
TOUCH := touch
UNIQ := uniq
WC := wc
XARGS := xargs
ZIPEXE := zip
@@ -173,7 +178,7 @@ HG := hg
ULIMIT := ulimit
ifeq ($(OPENJDK_BUILD_OS), windows)
PATHTOOL := cygpath
CYGPATH := cygpath
endif
################################################################################

View File

@@ -30,25 +30,34 @@ include MakeBase.gmk
############################################################################
ifeq ($(call isTargetOs, windows), true)
FIXPATH_COPY := $(TEST_IMAGE_DIR)/bin/fixpath.exe
$(FIXPATH_COPY): $(firstword $(FIXPATH))
$(call install-file)
FIXPATH_WORKSPACE_ROOT := $(call FixPath, $(WORKSPACE_ROOT))
FIXPATH_OUTPUTDIR := $(call FixPath, $(OUTPUTDIR))
else
FIXPATH_WORKSPACE_ROOT := $(WORKSPACE_ROOT)
FIXPATH_OUTPUTDIR := $(OUTPUTDIR)
endif
BUILD_INFO_PROPERTIES := $(TEST_IMAGE_DIR)/build-info.properties
$(BUILD_INFO_PROPERTIES):
$(call MakeTargetDir)
$(ECHO) "# Build info properties for JDK tests" > $@
$(ECHO) "build.workspace.root=$(call FixPath, $(WORKSPACE_ROOT))" >> $@
$(ECHO) "build.output.root=$(call FixPath, $(OUTPUTDIR))" >> $@
$(ECHO) "build.workspace.root=$(FIXPATH_WORKSPACE_ROOT)" >> $@
$(ECHO) "build.output.root=$(FIXPATH_OUTPUTDIR)" >> $@
README := $(TEST_IMAGE_DIR)/Readme.txt
$(README):
$(call MakeTargetDir)
$(ECHO) > $@ 'JDK test image'
TARGETS += $(BUILD_INFO_PROPERTIES) $(README)
prepare-test-image: $(FIXPATH_COPY) $(BUILD_INFO_PROPERTIES)
$(call MakeDir, $(TEST_IMAGE_DIR))
$(ECHO) > $(TEST_IMAGE_DIR)/Readme.txt 'JDK test image'
################################################################################
prepare-test-image: $(TARGETS)
all: prepare-test-image
.PHONY: default all prepare-test-image

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2019, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -67,9 +67,8 @@ TOOL_GENERATECURRENCYDATA = $(JAVA_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/jdk_tools_
TOOL_TZDB = $(JAVA_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/jdk_tools_classes \
build.tools.tzdb.TzdbZoneRulesCompiler
TOOL_BLOCKED_CERTS = $(JAVA_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/jdk_tools_classes \
--add-exports java.base/sun.security.util=ALL-UNNAMED \
build.tools.blockedcertsconverter.BlockedCertsConverter
TOOL_BLACKLISTED_CERTS = $(JAVA_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/jdk_tools_classes \
build.tools.blacklistedcertsconverter.BlacklistedCertsConverter
TOOL_MAKEJAVASECURITY = $(JAVA_SMALL) -cp $(BUILDTOOLS_OUTPUTDIR)/jdk_tools_classes \
build.tools.makejavasecurity.MakeJavaSecurity

View File

@@ -70,18 +70,27 @@ AC_DEFUN_ONCE([BASIC_SETUP_PATHS],
fi
if test "x$OPENJDK_TARGET_OS" = "xwindows"; then
BASIC_SETUP_PATHS_WINDOWS
PATH_SEP=";"
EXE_SUFFIX=".exe"
BASIC_CHECK_PATHS_WINDOWS
else
PATH_SEP=":"
EXE_SUFFIX=""
fi
AC_SUBST(PATH_SEP)
AC_SUBST(EXE_SUFFIX)
# We get the top-level directory from the supporting wrappers.
BASIC_WINDOWS_VERIFY_DIR($TOPDIR, source)
UTIL_FIXUP_PATH(TOPDIR)
AC_MSG_CHECKING([for top-level directory])
AC_MSG_RESULT([$TOPDIR])
AC_SUBST(TOPDIR)
AC_SUBST(CONFIGURE_START_DIR)
# We can only call UTIL_FIXUP_PATH after BASIC_CHECK_PATHS_WINDOWS.
UTIL_FIXUP_PATH(TOPDIR)
UTIL_FIXUP_PATH(CONFIGURE_START_DIR)
if test "x$CUSTOM_ROOT" != x; then
BASIC_WINDOWS_VERIFY_DIR($CUSTOM_ROOT, custom root)
UTIL_FIXUP_PATH(CUSTOM_ROOT)
WORKSPACE_ROOT="${CUSTOM_ROOT}"
else
@@ -89,9 +98,6 @@ AC_DEFUN_ONCE([BASIC_SETUP_PATHS],
fi
AC_SUBST(WORKSPACE_ROOT)
UTIL_FIXUP_PATH(CONFIGURE_START_DIR)
AC_SUBST(CONFIGURE_START_DIR)
# Locate the directory of this script.
AUTOCONF_DIR=$TOPDIR/make/autoconf
@@ -111,16 +117,6 @@ AC_DEFUN([BASIC_EVAL_DEVKIT_VARIABLE],
fi
])
###############################################################################
# Evaluates platform specific overrides for build devkit variables.
# $1: Name of variable
AC_DEFUN([BASIC_EVAL_BUILD_DEVKIT_VARIABLE],
[
if test "x[$]$1" = x; then
eval $1="\${$1_${OPENJDK_BUILD_CPU}}"
fi
])
###############################################################################
AC_DEFUN_ONCE([BASIC_SETUP_DEVKIT],
[
@@ -228,12 +224,9 @@ AC_DEFUN_ONCE([BASIC_SETUP_DEVKIT],
# if no Xcode installed, xcodebuild exits with 1
# if Xcode is installed, even if xcode-select is misconfigured, then it exits with 0
if test "x$DEVKIT_ROOT" != x || /usr/bin/xcodebuild -version >/dev/null 2>&1; then
# We need to use xcodebuild in the toolchain dir provided by the user
UTIL_LOOKUP_PROGS(XCODEBUILD, xcodebuild, $TOOLCHAIN_PATH)
if test x$XCODEBUILD = x; then
# fall back on the stub binary in /usr/bin/xcodebuild
XCODEBUILD=/usr/bin/xcodebuild
fi
# We need to use xcodebuild in the toolchain dir provided by the user, this will
# fall back on the stub binary in /usr/bin/xcodebuild
AC_PATH_PROG([XCODEBUILD], [xcodebuild], [/usr/bin/xcodebuild], [$TOOLCHAIN_PATH])
else
# this should result in SYSROOT being empty, unless --with-sysroot is provided
# when only the command line tools are installed there are no SDKs, so headers
@@ -303,7 +296,6 @@ AC_DEFUN_ONCE([BASIC_SETUP_DEVKIT],
AC_MSG_RESULT([$SYSROOT])
AC_MSG_CHECKING([for toolchain path])
AC_MSG_RESULT([$TOOLCHAIN_PATH])
AC_SUBST(TOOLCHAIN_PATH)
AC_MSG_CHECKING([for extra path])
AC_MSG_RESULT([$EXTRA_PATH])
])
@@ -378,7 +370,6 @@ AC_DEFUN_ONCE([BASIC_SETUP_OUTPUT_DIR],
AC_MSG_CHECKING([what configuration name to use])
AC_MSG_RESULT([$CONF_NAME])
BASIC_WINDOWS_VERIFY_DIR($OUTPUTDIR, output)
UTIL_FIXUP_PATH(OUTPUTDIR)
CONFIGURESUPPORT_OUTPUTDIR="$OUTPUTDIR/configure-support"
@@ -415,16 +406,26 @@ AC_DEFUN([BASIC_CHECK_DIR_ON_LOCAL_DISK],
# df -l lists only local disks; if the given directory is not found then
# a non-zero exit code is given
if test "x$DF" = x; then
# No df here, say it's local
$2
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
# msys does not have df; use Windows "net use" instead.
IS_NETWORK_DISK=`net use | grep \`pwd -W | cut -d ":" -f 1 | tr a-z A-Z\`:`
if test "x$IS_NETWORK_DISK" = x; then
$2
else
$3
fi
else
# No df here, say it's local
$2
fi
else
# JDK-8189619
# df on AIX does not understand -l. On modern AIXes it understands "-T local" which
# is the same. On older AIXes we just continue to live with a "not local build" warning.
if test "x$OPENJDK_TARGET_OS" = xaix; then
DF_LOCAL_ONLY_OPTION='-T local'
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl1"; then
# In WSL1, we can only build on a drvfs file system (that is, a mounted real Windows drive)
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
# In WSL, we can only build on a drvfs file system (that is, a mounted real Windows drive)
DF_LOCAL_ONLY_OPTION='-t drvfs'
else
DF_LOCAL_ONLY_OPTION='-l'

View File

@@ -28,37 +28,38 @@
# but is used by much of the early bootstrap code.
AC_DEFUN_ONCE([BASIC_SETUP_FUNDAMENTAL_TOOLS],
[
# Bootstrapping: These tools are needed by UTIL_LOOKUP_PROGS
AC_PATH_PROGS(BASENAME, basename)
UTIL_CHECK_NONEMPTY(BASENAME)
AC_PATH_PROGS(DIRNAME, dirname)
UTIL_CHECK_NONEMPTY(DIRNAME)
AC_PATH_PROGS(FILE, file)
UTIL_CHECK_NONEMPTY(FILE)
AC_PATH_PROGS(LDD, ldd)
# Start with tools that do not need have cross compilation support
# and can be expected to be found in the default PATH. These tools are
# used by configure.
# First are all the fundamental required tools.
# First are all the simple required tools.
UTIL_REQUIRE_PROGS(BASENAME, basename)
UTIL_REQUIRE_PROGS(BASH, bash)
UTIL_REQUIRE_PROGS(CAT, cat)
UTIL_REQUIRE_PROGS(CHMOD, chmod)
UTIL_REQUIRE_PROGS(CMP, cmp)
UTIL_REQUIRE_PROGS(COMM, comm)
UTIL_REQUIRE_PROGS(CP, cp)
UTIL_REQUIRE_PROGS(CUT, cut)
UTIL_REQUIRE_PROGS(DATE, date)
UTIL_REQUIRE_PROGS(DIFF, gdiff diff)
UTIL_REQUIRE_PROGS(DIFF, [gdiff diff])
UTIL_REQUIRE_PROGS(DIRNAME, dirname)
UTIL_REQUIRE_PROGS(ECHO, echo)
UTIL_REQUIRE_PROGS(EXPR, expr)
UTIL_REQUIRE_PROGS(FILE, file)
UTIL_REQUIRE_PROGS(FIND, find)
UTIL_REQUIRE_PROGS(HEAD, head)
UTIL_REQUIRE_PROGS(GUNZIP, gunzip)
UTIL_REQUIRE_PROGS(GZIP, pigz gzip)
UTIL_REQUIRE_PROGS(HEAD, head)
UTIL_REQUIRE_PROGS(LN, ln)
UTIL_REQUIRE_PROGS(LS, ls)
# gmkdir is known to be safe for concurrent invocations with -p flag.
UTIL_REQUIRE_PROGS(MKDIR, gmkdir mkdir)
UTIL_REQUIRE_PROGS(MKDIR, [gmkdir mkdir])
UTIL_REQUIRE_PROGS(MKTEMP, mktemp)
UTIL_REQUIRE_PROGS(MV, mv)
UTIL_REQUIRE_PROGS(AWK, gawk nawk awk)
UTIL_REQUIRE_PROGS(NAWK, [nawk gawk awk])
UTIL_REQUIRE_PROGS(PRINTF, printf)
UTIL_REQUIRE_PROGS(READLINK, [greadlink readlink])
UTIL_REQUIRE_PROGS(RM, rm)
UTIL_REQUIRE_PROGS(RMDIR, rmdir)
UTIL_REQUIRE_PROGS(SH, sh)
@@ -69,31 +70,34 @@ AC_DEFUN_ONCE([BASIC_SETUP_FUNDAMENTAL_TOOLS],
UTIL_REQUIRE_PROGS(TOUCH, touch)
UTIL_REQUIRE_PROGS(TR, tr)
UTIL_REQUIRE_PROGS(UNAME, uname)
UTIL_REQUIRE_PROGS(UNIQ, uniq)
UTIL_REQUIRE_PROGS(WC, wc)
UTIL_REQUIRE_PROGS(WHICH, which)
UTIL_REQUIRE_PROGS(XARGS, xargs)
# Then required tools that require some special treatment.
UTIL_REQUIRE_SPECIAL(AWK, [AC_PROG_AWK])
UTIL_REQUIRE_SPECIAL(GREP, [AC_PROG_GREP])
UTIL_REQUIRE_SPECIAL(EGREP, [AC_PROG_EGREP])
UTIL_REQUIRE_SPECIAL(FGREP, [AC_PROG_FGREP])
UTIL_REQUIRE_SPECIAL(SED, [AC_PROG_SED])
# Optional tools, we can do without them
UTIL_LOOKUP_PROGS(DF, df)
UTIL_LOOKUP_PROGS(NICE, nice)
UTIL_LOOKUP_PROGS(READLINK, greadlink readlink)
# These are only needed on some platforms
UTIL_LOOKUP_PROGS(PATHTOOL, cygpath wslpath)
UTIL_LOOKUP_PROGS(LSB_RELEASE, lsb_release)
UTIL_LOOKUP_PROGS(CMD, cmd.exe, $PATH:/cygdrive/c/windows/system32:/mnt/c/windows/system32:/c/windows/system32)
# For compare.sh only
UTIL_LOOKUP_PROGS(CMP, cmp)
UTIL_LOOKUP_PROGS(UNIQ, uniq)
# Always force rm.
RM="$RM -f"
# pwd behaves differently on various platforms and some don't support the -L flag.
# Always use the bash builtin pwd to get uniform behavior.
THEPWDCMD=pwd
# These are not required on all platforms
UTIL_PATH_PROGS(CYGPATH, cygpath)
UTIL_PATH_PROGS(WSLPATH, wslpath)
UTIL_PATH_PROGS(DF, df)
UTIL_PATH_PROGS(CPIO, [cpio bsdcpio])
UTIL_PATH_PROGS(NICE, nice)
UTIL_PATH_PROGS(LSB_RELEASE, lsb_release)
UTIL_PATH_PROGS(CMD, cmd.exe, $PATH /cygdrive/c/Windows/System32 /mnt/c/Windows/System32)
])
###############################################################################
@@ -128,14 +132,10 @@ AC_DEFUN([BASIC_CHECK_MAKE_VERSION],
if test "x$OPENJDK_BUILD_OS" = "xwindows"; then
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
MAKE_EXPECTED_ENV='cygwin'
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys2"; then
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
MAKE_EXPECTED_ENV='msys'
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl1" || test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl2"; then
if test "x$OPENJDK_BUILD_CPU" = "xaarch64"; then
MAKE_EXPECTED_ENV='aarch64-.*-linux-gnu'
else
MAKE_EXPECTED_ENV='x86_64-.*-linux-gnu'
fi
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
MAKE_EXPECTED_ENV='x86_64-.*-linux-gnu'
else
AC_MSG_ERROR([Unknown Windows environment])
fi
@@ -187,14 +187,14 @@ AC_DEFUN([BASIC_CHECK_MAKE_OUTPUT_SYNC],
# Goes looking for a usable version of GNU make.
AC_DEFUN([BASIC_CHECK_GNU_MAKE],
[
UTIL_SETUP_TOOL(MAKE,
UTIL_SETUP_TOOL([MAKE],
[
# Try our hardest to locate a correct version of GNU make
UTIL_LOOKUP_PROGS(CHECK_GMAKE, gmake)
AC_PATH_PROGS(CHECK_GMAKE, gmake)
BASIC_CHECK_MAKE_VERSION("$CHECK_GMAKE", [gmake in PATH])
if test "x$FOUND_MAKE" = x; then
UTIL_LOOKUP_PROGS(CHECK_MAKE, make)
AC_PATH_PROGS(CHECK_MAKE, make)
BASIC_CHECK_MAKE_VERSION("$CHECK_MAKE", [make in PATH])
fi
@@ -203,10 +203,10 @@ AC_DEFUN([BASIC_CHECK_GNU_MAKE],
# We have a toolchain path, check that as well before giving up.
OLD_PATH=$PATH
PATH=$TOOLCHAIN_PATH:$PATH
UTIL_LOOKUP_PROGS(CHECK_TOOLSDIR_GMAKE, gmake)
AC_PATH_PROGS(CHECK_TOOLSDIR_GMAKE, gmake)
BASIC_CHECK_MAKE_VERSION("$CHECK_TOOLSDIR_GMAKE", [gmake in tools-dir])
if test "x$FOUND_MAKE" = x; then
UTIL_LOOKUP_PROGS(CHECK_TOOLSDIR_MAKE, make)
AC_PATH_PROGS(CHECK_TOOLSDIR_MAKE, make)
BASIC_CHECK_MAKE_VERSION("$CHECK_TOOLSDIR_MAKE", [make in tools-dir])
fi
PATH=$OLD_PATH
@@ -336,17 +336,24 @@ AC_DEFUN_ONCE([BASIC_SETUP_COMPLEX_TOOLS],
# Non-required basic tools
UTIL_LOOKUP_PROGS(READELF, greadelf readelf)
UTIL_LOOKUP_PROGS(DOT, dot)
UTIL_LOOKUP_PROGS(HG, hg)
UTIL_LOOKUP_PROGS(GIT, git)
UTIL_LOOKUP_PROGS(STAT, stat)
UTIL_LOOKUP_PROGS(TIME, time)
UTIL_LOOKUP_PROGS(FLOCK, flock)
UTIL_PATH_PROGS(LDD, ldd)
if test "x$LDD" = "x"; then
# List shared lib dependencies is used for
# debug output and checking for forbidden dependencies.
# We can build without it.
LDD="true"
fi
UTIL_PATH_PROGS(READELF, [greadelf readelf])
UTIL_PATH_PROGS(DOT, dot)
UTIL_PATH_PROGS(HG, hg)
UTIL_PATH_PROGS(GIT, git)
UTIL_PATH_PROGS(STAT, stat)
UTIL_PATH_PROGS(TIME, time)
UTIL_PATH_PROGS(FLOCK, flock)
# Dtrace is usually found in /usr/sbin, but that directory may not
# be in the user path.
UTIL_LOOKUP_PROGS(DTRACE, dtrace, $PATH:/usr/sbin)
UTIL_LOOKUP_PROGS(PATCH, gpatch patch)
UTIL_PATH_PROGS(DTRACE, dtrace, $PATH:/usr/sbin)
UTIL_PATH_PROGS(PATCH, [gpatch patch])
# Check if it's GNU time
IS_GNU_TIME=`$TIME --version 2>&1 | $GREP 'GNU time'`
if test "x$IS_GNU_TIME" != x; then
@@ -360,7 +367,7 @@ AC_DEFUN_ONCE([BASIC_SETUP_COMPLEX_TOOLS],
UTIL_REQUIRE_PROGS(DSYMUTIL, dsymutil)
UTIL_REQUIRE_PROGS(MIG, mig)
UTIL_REQUIRE_PROGS(XATTR, xattr)
UTIL_LOOKUP_PROGS(CODESIGN, codesign)
UTIL_PATH_PROGS(CODESIGN, codesign)
if test "x$CODESIGN" != "x"; then
# Check for user provided code signing identity.
@@ -401,7 +408,7 @@ AC_DEFUN_ONCE([BASIC_SETUP_COMPLEX_TOOLS],
UTIL_REQUIRE_PROGS(SETFILE, SetFile)
fi
if ! test "x$OPENJDK_TARGET_OS" = "xwindows"; then
UTIL_REQUIRE_PROGS(ULIMIT, ulimit)
UTIL_REQUIRE_BUILTIN_PROGS(ULIMIT, ulimit)
fi
])
@@ -447,7 +454,7 @@ AC_DEFUN_ONCE([BASIC_CHECK_BASH_OPTIONS],
#
AC_DEFUN_ONCE([BASIC_SETUP_PANDOC],
[
UTIL_LOOKUP_PROGS(PANDOC, pandoc)
UTIL_PATH_PROGS(PANDOC, pandoc)
PANDOC_MARKDOWN_FLAG="markdown"
if test -n "$PANDOC"; then

View File

@@ -24,160 +24,97 @@
#
# Setup basic configuration paths, and platform-specific stuff related to PATHs.
AC_DEFUN([BASIC_SETUP_PATHS_WINDOWS],
AC_DEFUN([BASIC_CHECK_PATHS_WINDOWS],
[
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
# Clarify if it is wsl1 or wsl2, and use that as OS_ENV from this point forward
$PATHTOOL -w / > /dev/null 2>&1
if test $? -ne 0; then
# Without Windows access to our root, it's definitely WSL1
OPENJDK_BUILD_OS_ENV=windows.wsl1
else
# This test is not guaranteed, but there is no documented way of
# distinguishing between WSL1 and WSL2. Assume only WSL2 has WSL_INTEROP
# in /run/WSL
if test -d "/run/WSL" ; then
OPENJDK_BUILD_OS_ENV=windows.wsl2
else
OPENJDK_BUILD_OS_ENV=windows.wsl1
fi
fi
# This is a bit silly since we really don't have a target env as such,
# but do it to keep consistency.
OPENJDK_TARGET_OS_ENV=$OPENJDK_BUILD_OS_ENV
fi
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys2"; then
# Must be done prior to calling any commands to avoid mangling of command line
export MSYS2_ARG_CONV_EXCL="*"
fi
AC_MSG_CHECKING([Windows environment type])
WINENV_VENDOR=${OPENJDK_BUILD_OS_ENV#windows.}
AC_MSG_RESULT([$WINENV_VENDOR])
if test "x$WINENV_VENDOR" = x; then
AC_MSG_ERROR([Unknown Windows environment. Neither cygwin, msys2, wsl1 nor wsl2 was detected.])
fi
if test "x$PATHTOOL" = x; then
AC_MSG_ERROR([Incorrect $WINENV_VENDOR installation. Neither cygpath nor wslpath was found])
fi
if test "x$CMD" = x; then
AC_MSG_ERROR([Incorrect Windows/$WINENV_VENDOR setup. Could not locate cmd.exe])
fi
AC_MSG_CHECKING([$WINENV_VENDOR drive prefix])
WINENV_PREFIX=`$PATHTOOL -u c:/ | $SED -e 's!/c/!!'`
AC_MSG_RESULT(['$WINENV_PREFIX'])
AC_SUBST(WINENV_PREFIX)
AC_MSG_CHECKING([$WINENV_VENDOR root directory as Windows path])
if test "x$OPENJDK_BUILD_OS_ENV" != "xwindows.wsl1"; then
WINENV_ROOT=`$PATHTOOL -w / 2> /dev/null`
# msys2 has a trailing backslash; strip it
WINENV_ROOT=${WINENV_ROOT%\\}
else
WINENV_ROOT='[[unavailable]]'
fi
AC_MSG_RESULT(['$WINENV_ROOT'])
AC_SUBST(WINENV_ROOT)
AC_MSG_CHECKING([$WINENV_VENDOR temp directory])
WINENV_TEMP_DIR=$($PATHTOOL -u $($CMD /q /c echo %TEMP% 2> /dev/null) | $TR -d '\r\n')
AC_MSG_RESULT([$WINENV_TEMP_DIR])
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl2"; then
# Don't trust the current directory for WSL2, but change to an OK temp dir
cd "$WINENV_TEMP_DIR"
# Bring along confdefs.h or autoconf gets all confused
cp "$CONFIGURE_START_DIR/confdefs.h" "$WINENV_TEMP_DIR"
fi
AC_MSG_CHECKING([$WINENV_VENDOR release])
WINENV_UNAME_RELEASE=`$UNAME -r`
AC_MSG_RESULT([$WINENV_UNAME_RELEASE])
AC_MSG_CHECKING([$WINENV_VENDOR version])
WINENV_UNAME_VERSION=`$UNAME -v`
AC_MSG_RESULT([$WINENV_UNAME_VERSION])
WINENV_VERSION="$WINENV_UNAME_RELEASE, $WINENV_UNAME_VERSION"
AC_MSG_CHECKING([Windows version])
# We must change directory to one guaranteed to work, otherwise WSL1
# can complain (since it does not have a WINENV_ROOT so it can't access
# unix-style paths from Windows.
# Additional [] needed to keep m4 from mangling shell constructs.
[ WINDOWS_VERSION=`cd $WINENV_TEMP_DIR && $CMD /c ver | $EGREP -o '([0-9]+\.)+[0-9]+'` ]
AC_MSG_RESULT([$WINDOWS_VERSION])
# Additional handling per specific env
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
# Additional [] needed to keep m4 from mangling shell constructs.
[ CYGWIN_VERSION_OLD=`$ECHO $WINENV_UNAME_RELEASE | $GREP -e '^1\.[0-6]'` ]
if test "x$CYGWIN_VERSION_OLD" != x; then
AC_MSG_NOTICE([Your cygwin is too old. You are running $CYGWIN_RELEASE, but at least cygwin 1.7 is required. Please upgrade.])
AC_MSG_ERROR([Cannot continue])
fi
if test "x$LDD" = x; then
AC_MSG_ERROR([ldd is missing, which is needed on cygwin])
fi
WINENV_MARKER_DLL=cygwin1.dll
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys2"; then
if test "x$LDD" = x; then
AC_MSG_ERROR([ldd is missing, which is needed on msys2])
fi
WINENV_MARKER_DLL=msys-2.0.dll
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl1" || test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl2"; then
AC_MSG_CHECKING([wsl distribution])
WSL_DISTRIBUTION=`$LSB_RELEASE -d | sed 's/Description:\t//'`
AC_MSG_RESULT([$WSL_DISTRIBUTION])
WINENV_VERSION="$WINENV_VERSION ($WSL_DISTRIBUTION)"
# Tell WSL to automatically translate the PATH variable
export WSLENV=PATH/l
fi
# Chicken and egg: FIXPATH is needed for UTIL_FIXUP_PATH to work. So for the
# first run we use the auto-detect abilities of fixpath.sh.
FIXPATH_DIR="$TOPDIR/make/scripts"
FIXPATH="$BASH $FIXPATH_DIR/fixpath.sh exec"
FIXPATH_BASE="$BASH $FIXPATH_DIR/fixpath.sh"
FIXPATH_SAVED_PATH="$PATH"
UTIL_FIXUP_PATH(FIXPATH_DIR)
# Now we can use FIXPATH_DIR to rewrite path to fixpath.sh properly.
if test "x$WINENV_PREFIX" = x; then
# On msys the prefix is empty, but we need to pass something to have the
# fixpath.sh options parser happy.
WINENV_PREFIX_ARG="NONE"
else
WINENV_PREFIX_ARG="$WINENV_PREFIX"
fi
FIXPATH_ARGS="-e $PATHTOOL -p $WINENV_PREFIX_ARG -r ${WINENV_ROOT//\\/\\\\} -t $WINENV_TEMP_DIR -c $CMD -q"
FIXPATH_BASE="$BASH $FIXPATH_DIR/fixpath.sh $FIXPATH_ARGS"
FIXPATH="$FIXPATH_BASE exec"
AC_SUBST(FIXPATH_BASE)
AC_SUBST(FIXPATH)
SRC_ROOT_LENGTH=`$ECHO "$TOPDIR" | $WC -m`
SRC_ROOT_LENGTH=`$THEPWDCMD -L|$WC -m`
if test $SRC_ROOT_LENGTH -gt 100; then
AC_MSG_ERROR([Your base path is too long. It is $SRC_ROOT_LENGTH characters long, but only 100 is supported])
fi
# Test if windows or unix "find" is first in path.
AC_MSG_CHECKING([Windows version])
# Additional [] needed to keep m4 from mangling shell constructs.
[ WINDOWS_VERSION=`$CMD /c ver.exe | $EGREP -o '([0-9]+\.)+[0-9]+'` ]
AC_MSG_RESULT([$WINDOWS_VERSION])
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
AC_MSG_CHECKING([cygwin release])
CYGWIN_RELEASE=`$UNAME -r`
AC_MSG_RESULT([$CYGWIN_RELEASE])
AC_MSG_CHECKING([cygwin version])
CYGWIN_VERSION=`$UNAME -v`
AC_MSG_RESULT([$CYGWIN_VERSION])
# Additional [] needed to keep m4 from mangling shell constructs.
[ CYGWIN_VERSION_OLD=`$ECHO $CYGWIN_RELEASE | $GREP -e '^1\.[0-6]'` ]
if test "x$CYGWIN_VERSION_OLD" != x; then
AC_MSG_NOTICE([Your cygwin is too old. You are running $CYGWIN_RELEASE, but at least cygwin 1.7 is required. Please upgrade.])
AC_MSG_ERROR([Cannot continue])
fi
WINDOWS_ENV_VENDOR='cygwin'
WINDOWS_ENV_VERSION="$CYGWIN_RELEASE, $CYGWIN_VERSION"
if test "x$CYGPATH" = x; then
AC_MSG_ERROR([Something is wrong with your cygwin installation since I cannot find cygpath.exe in your path])
fi
AC_MSG_CHECKING([cygwin root directory as unix-style path])
# The cmd output ends with Windows line endings (CR/LF)
cygwin_winpath_root=`cd / ; cmd /c cd | $TR -d '\r\n'`
# Force cygpath to report the proper root by including a trailing space, and then stripping it off again.
CYGWIN_ROOT_PATH=`$CYGPATH -u "$cygwin_winpath_root " | $CUT -f 1 -d " "`
AC_MSG_RESULT([$CYGWIN_ROOT_PATH])
WINDOWS_ENV_ROOT_PATH="$CYGWIN_ROOT_PATH"
test_cygdrive_prefix=`$ECHO $CYGWIN_ROOT_PATH | $GREP ^/cygdrive/`
if test "x$test_cygdrive_prefix" = x; then
AC_MSG_ERROR([Your cygdrive prefix is not /cygdrive. This is currently not supported. Change with mount -c.])
fi
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
AC_MSG_CHECKING([msys release])
MSYS_RELEASE=`$UNAME -r`
AC_MSG_RESULT([$MSYS_RELEASE])
AC_MSG_CHECKING([msys version])
MSYS_VERSION=`$UNAME -v`
AC_MSG_RESULT([$MSYS_VERSION])
WINDOWS_ENV_VENDOR='msys'
WINDOWS_ENV_VERSION="$MSYS_RELEASE, $MSYS_VERSION"
AC_MSG_CHECKING([msys root directory as unix-style path])
# The cmd output ends with Windows line endings (CR/LF), the grep command will strip that away
MSYS_ROOT_PATH=`cd / ; cmd /c cd | $GREP ".*"`
UTIL_REWRITE_AS_UNIX_PATH(MSYS_ROOT_PATH)
AC_MSG_RESULT([$MSYS_ROOT_PATH])
WINDOWS_ENV_ROOT_PATH="$MSYS_ROOT_PATH"
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
AC_MSG_CHECKING([WSL kernel version])
WSL_KERNEL_VERSION=`$UNAME -v`
AC_MSG_RESULT([$WSL_KERNEL_VERSION])
AC_MSG_CHECKING([WSL kernel release])
WSL_KERNEL_RELEASE=`$UNAME -r`
AC_MSG_RESULT([$WSL_KERNEL_RELEASE])
AC_MSG_CHECKING([WSL distribution])
WSL_DISTRIBUTION=`$LSB_RELEASE -d | sed 's/Description:\t//'`
AC_MSG_RESULT([$WSL_DISTRIBUTION])
WINDOWS_ENV_VENDOR='wsl'
WINDOWS_ENV_VERSION="$WSL_KERNEL_RELEASE, $WSL_KERNEL_VERSION ($WSL_DISTRIBUTION)"
else
AC_MSG_ERROR([Unknown Windows environment. Neither cygwin, msys, nor wsl was detected.])
fi
# Test if windows or unix (cygwin/msys) find is first in path.
AC_MSG_CHECKING([what kind of 'find' is first on the PATH])
FIND_BINARY_OUTPUT=`find --version 2>&1`
if test "x`echo $FIND_BINARY_OUTPUT | $GREP GNU`" != x; then
AC_MSG_RESULT([unix style])
elif test "x`echo $FIND_BINARY_OUTPUT | $GREP FIND`" != x; then
AC_MSG_RESULT([Windows])
AC_MSG_NOTICE([Your path contains Windows tools (C:\Windows\system32) before your unix tools.])
AC_MSG_NOTICE([Your path contains Windows tools (C:\Windows\system32) before your unix (cygwin or msys) tools.])
AC_MSG_NOTICE([This will not work. Please correct and make sure /usr/bin (or similar) is first in path.])
AC_MSG_ERROR([Cannot continue])
else
@@ -186,39 +123,73 @@ AC_DEFUN([BASIC_SETUP_PATHS_WINDOWS],
fi
])
# Verify that the directory is usable on Windows
AC_DEFUN([BASIC_WINDOWS_VERIFY_DIR],
[
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl1"; then
OUTPUTDIR_WIN=`$FIXPATH_BASE print $1`
if test "x$OUTPUTDIR_WIN" = x; then
AC_MSG_NOTICE([For wsl1, the $2 dir must be located on a Windows drive. Please see doc/building.md for details.])
AC_MSG_ERROR([Cannot continue])
fi
fi
])
# Create fixpath wrapper
AC_DEFUN([BASIC_WINDOWS_FINALIZE_FIXPATH],
AC_DEFUN_ONCE([BASIC_COMPILE_FIXPATH],
[
# When using cygwin or msys, we need a wrapper binary that renames
# /cygdrive/c/ arguments into c:/ arguments and peeks into
# @files and rewrites these too! This wrapper binary is
# called fixpath.
FIXPATH=
if test "x$OPENJDK_BUILD_OS" = xwindows; then
FIXPATH_CMDLINE=". $TOPDIR/make/scripts/fixpath.sh -e $PATHTOOL \
-p $WINENV_PREFIX_ARG -r ${WINENV_ROOT//\\/\\\\} -t $WINENV_TEMP_DIR \
-c $CMD -q"
$ECHO > $OUTPUTDIR/fixpath '#!/bin/bash'
$ECHO >> $OUTPUTDIR/fixpath export PATH='"[$]PATH:'$PATH'"'
$ECHO >> $OUTPUTDIR/fixpath $FIXPATH_CMDLINE '"[$]@"'
$CHMOD +x $OUTPUTDIR/fixpath
FIXPATH_BASE="$OUTPUTDIR/fixpath"
FIXPATH="$FIXPATH_BASE exec"
fi
])
# Platform-specific finalization
AC_DEFUN([BASIC_WINDOWS_FINALIZE],
[
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl2"; then
# Change back from temp dir
AC_MSG_CHECKING([if fixpath can be created])
FIXPATH_SRC="$TOPDIR/make/src/native/fixpath.c"
FIXPATH_BIN="$CONFIGURESUPPORT_OUTPUTDIR/bin/fixpath.exe"
FIXPATH_DIR="$CONFIGURESUPPORT_OUTPUTDIR/fixpath"
if test "x$OPENJDK_BUILD_OS_ENV" = xwindows.cygwin; then
# Important to keep the .exe suffix on Cygwin for Hotspot makefiles
FIXPATH="$FIXPATH_BIN -c"
elif test "x$OPENJDK_BUILD_OS_ENV" = xwindows.msys; then
# Take all collected prefixes and turn them into a -m/c/foo@/c/bar@... command line
# @ was chosen as separator to minimize risk of other tools messing around with it
all_unique_prefixes=`echo "${all_fixpath_prefixes@<:@@@:>@}" \
| tr ' ' '\n' | $GREP '^/./' | $SORT | $UNIQ`
fixpath_argument_list=`echo $all_unique_prefixes | tr ' ' '@'`
FIXPATH="$FIXPATH_BIN -m$fixpath_argument_list"
elif test "x$OPENJDK_BUILD_OS_ENV" = xwindows.wsl; then
FIXPATH="$FIXPATH_BIN -w"
fi
FIXPATH_SRC_W="$FIXPATH_SRC"
FIXPATH_BIN_W="$FIXPATH_BIN"
$RM -rf $FIXPATH_BIN $FIXPATH_DIR
$MKDIR -p $FIXPATH_DIR $CONFIGURESUPPORT_OUTPUTDIR/bin
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([FIXPATH_SRC_W])
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([FIXPATH_BIN_W])
cd $FIXPATH_DIR
$CC $FIXPATH_SRC_W -Fe$FIXPATH_BIN_W > $FIXPATH_DIR/fixpath1.log 2>&1
cd $CONFIGURE_START_DIR
if test ! -x $FIXPATH_BIN; then
AC_MSG_RESULT([no])
cat $FIXPATH_DIR/fixpath1.log
AC_MSG_ERROR([Could not create $FIXPATH_BIN])
fi
AC_MSG_RESULT([yes])
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
OLD_WSLENV="$WSLENV"
WSLENV=`$ECHO $WSLENV | $SED 's/PATH\/l://'`
UTIL_APPEND_TO_PATH(WSLENV, "FIXPATH_PATH")
export WSLENV
export FIXPATH_PATH=$VS_PATH_WINDOWS
AC_MSG_NOTICE([FIXPATH_PATH is $FIXPATH_PATH])
AC_MSG_NOTICE([Rewriting WSLENV from $OLD_WSLENV to $WSLENV])
fi
AC_MSG_CHECKING([if fixpath.exe works])
cd $FIXPATH_DIR
$FIXPATH $CC $FIXPATH_SRC -Fe$FIXPATH_DIR/fixpath2.exe \
> $FIXPATH_DIR/fixpath2.log 2>&1
cd $CONFIGURE_START_DIR
if test ! -x $FIXPATH_DIR/fixpath2.exe; then
AC_MSG_RESULT([no])
cat $FIXPATH_DIR/fixpath2.log
AC_MSG_ERROR([fixpath did not work!])
fi
AC_MSG_RESULT([yes])
FIXPATH_DETACH_FLAG="--detach"
fi
AC_SUBST(FIXPATH)
AC_SUBST(FIXPATH_DETACH_FLAG)
])

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -63,23 +63,19 @@ AC_DEFUN([BOOTJDK_DO_CHECK],
# If previous step claimed to have found a JDK, check it to see if it seems to be valid.
if test "x$BOOT_JDK_FOUND" = xmaybe; then
# Do we have a bin/java?
if test ! -x "$BOOT_JDK/bin/java" && test ! -x "$BOOT_JDK/bin/java.exe"; then
if test ! -x "$BOOT_JDK/bin/java$EXE_SUFFIX"; then
AC_MSG_NOTICE([Potential Boot JDK found at $BOOT_JDK did not contain bin/java; ignoring])
BOOT_JDK_FOUND=no
else
# Do we have a bin/javac?
if test ! -x "$BOOT_JDK/bin/javac" && test ! -x "$BOOT_JDK/bin/javac.exe"; then
if test ! -x "$BOOT_JDK/bin/javac$EXE_SUFFIX"; then
AC_MSG_NOTICE([Potential Boot JDK found at $BOOT_JDK did not contain bin/javac; ignoring])
AC_MSG_NOTICE([(This might be an JRE instead of an JDK)])
BOOT_JDK_FOUND=no
else
# Oh, this is looking good! We probably have found a proper JDK. Is it the correct version?
java_to_test="$BOOT_JDK/bin/java"
UTIL_FIXUP_EXECUTABLE(java_to_test)
BOOT_JDK_VERSION_OUTPUT=`$java_to_test $USER_BOOT_JDK_OPTIONS -version 2>&1`
# Additional [] needed to keep m4 from mangling shell constructs.
[ BOOT_JDK_VERSION=`echo $BOOT_JDK_VERSION_OUTPUT | $AWK '/version "[0-9a-zA-Z\._\-]+"/ {print $ 0; exit;}'` ]
[ BOOT_JDK_VERSION=`"$BOOT_JDK/bin/java$EXE_SUFFIX" $USER_BOOT_JDK_OPTIONS -version 2>&1 | $AWK '/version \"[0-9a-zA-Z\._\-]+\"/{print $ 0; exit;}'` ]
if [ [[ "$BOOT_JDK_VERSION" =~ "Picked up" ]] ]; then
AC_MSG_NOTICE([You have _JAVA_OPTIONS or JAVA_TOOL_OPTIONS set. This can mess up the build. Please use --with-boot-jdk-jvmargs instead.])
AC_MSG_NOTICE([Java reports: "$BOOT_JDK_VERSION".])
@@ -95,12 +91,7 @@ AC_DEFUN([BOOTJDK_DO_CHECK],
# Extra M4 quote needed to protect [] in grep expression.
[FOUND_CORRECT_VERSION=`$ECHO $BOOT_JDK_VERSION \
| $EGREP "\"(${DEFAULT_ACCEPTABLE_BOOT_VERSIONS// /|})([\.+-].*)?\""`]
if test "x$BOOT_JDK_VERSION" = x; then
AC_MSG_NOTICE([Potential Boot JDK found at $BOOT_JDK is not a working JDK; ignoring])
AC_MSG_NOTICE([Output from java -version was: $BOOT_JDK_VERSION_OUTPUT])
BOOT_JDK_FOUND=no
elif test "x$FOUND_CORRECT_VERSION" = x; then
if test "x$FOUND_CORRECT_VERSION" = x; then
AC_MSG_NOTICE([Potential Boot JDK found at $BOOT_JDK is incorrect JDK version ($BOOT_JDK_VERSION); ignoring])
AC_MSG_NOTICE([(Your Boot JDK version must be one of: $DEFAULT_ACCEPTABLE_BOOT_VERSIONS)])
BOOT_JDK_FOUND=no
@@ -111,9 +102,7 @@ AC_DEFUN([BOOTJDK_DO_CHECK],
AC_MSG_CHECKING([for Boot JDK])
AC_MSG_RESULT([$BOOT_JDK])
AC_MSG_CHECKING([Boot JDK version])
BOOT_JDK_VERSION=`$java_to_test $USER_BOOT_JDK_OPTIONS -version 2>&1 | $TR -d '\r'`
# This is not a no-op; it will portably convert newline to space
BOOT_JDK_VERSION=`$ECHO $BOOT_JDK_VERSION`
BOOT_JDK_VERSION=`"$BOOT_JDK/bin/java$EXE_SUFFIX" $USER_BOOT_JDK_OPTIONS -version 2>&1 | $TR '\n\r' ' '`
AC_MSG_RESULT([$BOOT_JDK_VERSION])
fi # end check jdk version
fi # end check javac
@@ -162,8 +151,8 @@ AC_DEFUN([BOOTJDK_CHECK_JAVA_HOME],
[
if test "x$JAVA_HOME" != x; then
JAVA_HOME_PROCESSED="$JAVA_HOME"
UTIL_FIXUP_PATH(JAVA_HOME_PROCESSED, NOFAIL)
if test "x$JAVA_HOME_PROCESSED" = x || test ! -d "$JAVA_HOME_PROCESSED"; then
UTIL_FIXUP_PATH(JAVA_HOME_PROCESSED)
if test ! -d "$JAVA_HOME_PROCESSED"; then
AC_MSG_NOTICE([Your JAVA_HOME points to a non-existing directory!])
else
# Aha, the user has set a JAVA_HOME
@@ -178,8 +167,8 @@ AC_DEFUN([BOOTJDK_CHECK_JAVA_HOME],
# Test: Is there a java or javac in the PATH, which is a symlink to the JDK?
AC_DEFUN([BOOTJDK_CHECK_JAVA_IN_PATH_IS_SYMLINK],
[
UTIL_LOOKUP_PROGS(JAVAC_CHECK, javac, , NOFIXPATH)
UTIL_LOOKUP_PROGS(JAVA_CHECK, java, , NOFIXPATH)
AC_PATH_PROG(JAVAC_CHECK, javac)
AC_PATH_PROG(JAVA_CHECK, java)
BINARY="$JAVAC_CHECK"
if test "x$JAVAC_CHECK" = x; then
BINARY="$JAVA_CHECK"
@@ -218,9 +207,9 @@ AC_DEFUN([BOOTJDK_CHECK_MACOSX_JAVA_LOCATOR],
# First check at user selected default
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_LIBEXEC_JAVA_HOME()])
# If that did not work out (e.g. too old), try explicit versions instead
for ver in $DEFAULT_ACCEPTABLE_BOOT_VERSIONS ; do
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_LIBEXEC_JAVA_HOME([-v $ver])])
done
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_LIBEXEC_JAVA_HOME([-v 1.9])])
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_LIBEXEC_JAVA_HOME([-v 1.8])])
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_LIBEXEC_JAVA_HOME([-v 1.7])])
fi
])
@@ -253,10 +242,8 @@ AC_DEFUN([BOOTJDK_FIND_BEST_JDK_IN_WINDOWS_VIRTUAL_DIRECTORY],
[
if test "x[$]$1" != x; then
VIRTUAL_DIR="[$]$1/Java"
UTIL_FIXUP_PATH(VIRTUAL_DIR, NOFAIL)
if test "x$VIRTUAL_DIR" != x; then
BOOTJDK_FIND_BEST_JDK_IN_DIRECTORY($VIRTUAL_DIR)
fi
UTIL_REWRITE_AS_UNIX_PATH(VIRTUAL_DIR)
BOOTJDK_FIND_BEST_JDK_IN_DIRECTORY($VIRTUAL_DIR)
fi
])
@@ -283,17 +270,16 @@ AC_DEFUN([BOOTJDK_CHECK_WELL_KNOWN_LOCATIONS],
AC_DEFUN([BOOTJDK_CHECK_TOOL_IN_BOOTJDK],
[
# Use user overridden value if available, otherwise locate tool in the Boot JDK.
UTIL_REQUIRE_SPECIAL($1,
UTIL_SETUP_TOOL($1,
[
AC_MSG_CHECKING([for $2 [[Boot JDK]]])
AC_MSG_CHECKING([for $2 in Boot JDK])
$1=$BOOT_JDK/bin/$2
if test ! -x [$]$1 && test ! -x [$]$1.exe; then
if test ! -x [$]$1; then
AC_MSG_RESULT(not found)
AC_MSG_NOTICE([Your Boot JDK seems broken. This might be fixed by explicitly setting --with-boot-jdk])
AC_MSG_ERROR([Could not find $2 in the Boot JDK])
fi
AC_MSG_RESULT(\[$]BOOT_JDK/bin/$2)
UTIL_FIXUP_EXECUTABLE($1)
AC_MSG_RESULT(ok)
AC_SUBST($1)
])
])
@@ -327,12 +313,12 @@ AC_DEFUN_ONCE([BOOTJDK_SETUP_BOOT_JDK],
AC_MSG_ERROR([The path given by --with-boot-jdk does not contain a valid Boot JDK])
fi
# Test: Is $JAVA_HOME set?
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_JAVA_HOME])
# Test: On MacOS X, can we find a boot jdk using /usr/libexec/java_home?
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_MACOSX_JAVA_LOCATOR])
# Test: Is $JAVA_HOME set?
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_JAVA_HOME])
# Test: Is there a java or javac in the PATH, which is a symlink to the JDK?
BOOTJDK_DO_CHECK([BOOTJDK_CHECK_JAVA_IN_PATH_IS_SYMLINK])
@@ -350,10 +336,11 @@ AC_DEFUN_ONCE([BOOTJDK_SETUP_BOOT_JDK],
AC_SUBST(BOOT_JDK)
# Setup tools from the Boot JDK.
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAVA, java)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAVAC, javac)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAVADOC, javadoc)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAR, jar)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAVA, java$EXE_SUFFIX)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAVAC, javac$EXE_SUFFIX)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAVADOC, javadoc$EXE_SUFFIX)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JAR, jar$EXE_SUFFIX)
BOOTJDK_CHECK_TOOL_IN_BOOTJDK(JARSIGNER, jarsigner$EXE_SUFFIX)
# Finally, set some other options...
@@ -366,7 +353,7 @@ AC_DEFUN_ONCE([BOOTJDK_SETUP_BOOT_JDK],
AC_SUBST(BOOT_JDK_SOURCETARGET)
# Check if the boot jdk is 32 or 64 bit
if $JAVA -version 2>&1 | $GREP -q "64-Bit"; then
if "$JAVA" -version 2>&1 | $GREP -q "64-Bit"; then
BOOT_JDK_BITS="64"
else
BOOT_JDK_BITS="32"
@@ -381,7 +368,7 @@ AC_DEFUN_ONCE([BOOTJDK_SETUP_BOOT_JDK],
if test "x$boot_jdk_cds_args" != x; then
# Try creating a CDS archive
$JAVA $boot_jdk_cds_args -Xshare:dump > /dev/null 2>&1
"$JAVA" $boot_jdk_cds_args -Xshare:dump > /dev/null 2>&1
if test $? -eq 0; then
BOOTJDK_USE_LOCAL_CDS=true
AC_MSG_RESULT([yes, created])
@@ -529,7 +516,7 @@ AC_DEFUN([BOOTJDK_CHECK_BUILD_JDK],
else
# Oh, this is looking good! We probably have found a proper JDK. Is it the correct version?
# Additional [] needed to keep m4 from mangling shell constructs.
[ BUILD_JDK_VERSION=`"$BUILD_JDK/bin/java" -version 2>&1 | $AWK '/version "[0-9a-zA-Z\._\-]+"/ {print $ 0; exit;}'` ]
[ BUILD_JDK_VERSION=`"$BUILD_JDK/bin/java" -version 2>&1 | $AWK '/version \"[0-9a-zA-Z\._\-]+\"/{print $ 0; exit;}'` ]
# Extra M4 quote needed to protect [] in grep expression.
[FOUND_CORRECT_VERSION=`echo $BUILD_JDK_VERSION | $EGREP "\"$VERSION_FEATURE([\.+-].*)?\""`]
@@ -587,12 +574,8 @@ AC_DEFUN([BOOTJDK_SETUP_BUILD_JDK],
fi
fi
# Since these tools do not yet exist, we cannot use UTIL_FIXUP_EXECUTABLE to
# detect the need of fixpath
JMOD="$BUILD_JDK/bin/jmod"
UTIL_ADD_FIXPATH(JMOD)
JLINK="$BUILD_JDK/bin/jlink"
UTIL_ADD_FIXPATH(JLINK)
AC_SUBST(JMOD)
AC_SUBST(JLINK)
@@ -606,28 +589,3 @@ AC_DEFUN([BOOTJDK_SETUP_BUILD_JDK],
AC_SUBST(BUILD_JDK)
AC_SUBST(EXTERNAL_BUILDJDK)
])
# The docs-reference JDK is used to run javadoc for the docs-reference targets.
# If not set, the reference docs will be built using the interim javadoc.
AC_DEFUN([BOOTJDK_SETUP_DOCS_REFERENCE_JDK],
[
AC_ARG_WITH(docs-reference-jdk, [AS_HELP_STRING([--with-docs-reference-jdk],
[path to JDK to use for building the reference documentation])])
AC_MSG_CHECKING([for docs-reference JDK])
if test "x$with_docs_reference_jdk" != "x"; then
DOCS_REFERENCE_JDK="$with_docs_reference_jdk"
AC_MSG_RESULT([$DOCS_REFERENCE_JDK])
DOCS_REFERENCE_JAVADOC="$DOCS_REFERENCE_JDK/bin/javadoc"
if test ! -x "$DOCS_REFERENCE_JAVADOC"; then
AC_MSG_ERROR([docs-reference JDK found at $DOCS_REFERENCE_JDK did not contain bin/javadoc])
fi
UTIL_FIXUP_EXECUTABLE(DOCS_REFERENCE_JAVADOC)
else
AC_MSG_RESULT([no, using interim javadoc for the docs-reference targets])
# By leaving this empty, Docs.gmk will revert to the default interim javadoc
DOCS_REFERENCE_JAVADOC=
fi
AC_SUBST(DOCS_REFERENCE_JAVADOC)
])

View File

@@ -28,6 +28,14 @@
# First include the real base spec.gmk file
include @SPEC@
# Check that the user did not try to specify a different java to use for compiling.
# On windows we need to account for fixpath being first word.
ifeq ($(firstword $(JAVA)),$(FIXPATH))
JAVA_EXEC_POS=2
else
JAVA_EXEC_POS=1
endif
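
JAVA_EXEC_POS records whether the java binary is the first or the second word of $(JAVA), depending on whether a fixpath wrapper is prepended on Windows. A standalone sketch of extracting the executable by position, using made-up values for FIXPATH and JAVA:

# execpos.mk -- illustrative values only.
FIXPATH := fixpath
JAVA := $(FIXPATH) /jdk/bin/java -Xms64M

ifeq ($(firstword $(JAVA)),$(FIXPATH))
  JAVA_EXEC_POS := 2
else
  JAVA_EXEC_POS := 1
endif

JAVA_EXEC := $(word $(JAVA_EXEC_POS),$(JAVA))
$(info java executable is word $(JAVA_EXEC_POS): $(JAVA_EXEC))

all: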
# Override specific values to do a boot cycle build
# Use a different Boot JDK
@@ -39,9 +47,10 @@ OUTPUTDIR:=$(OLD_OUTPUTDIR)/bootcycle-build
# No spaces in patsubst to avoid leading space in variable
JAVAC_SERVER_DIR:=$(patsubst $(OLD_OUTPUTDIR)%,$(OUTPUTDIR)%,$(JAVAC_SERVER_DIR))
JAVA_CMD := $(FIXPATH) $(BOOT_JDK)/bin/java
JAVAC_CMD := $(FIXPATH) $(BOOT_JDK)/bin/javac
JAR_CMD := $(FIXPATH) $(BOOT_JDK)/bin/jar
JAVA_CMD:=$(BOOT_JDK)/bin/java
JAVAC_CMD:=$(BOOT_JDK)/bin/javac
JAR_CMD:=$(BOOT_JDK)/bin/jar
JARSIGNER_CMD:=$(BOOT_JDK)/bin/jarsigner
# The bootcycle JVM arguments may differ from the original boot jdk.
JAVA_FLAGS_BIG := @BOOTCYCLE_JVM_ARGS_BIG@
# Any CDS settings generated for the bootjdk are invalid in the bootcycle build.
@@ -49,3 +58,5 @@ JAVA_FLAGS_BIG := @BOOTCYCLE_JVM_ARGS_BIG@
# settings for CDS.
JAVA_FLAGS := $(filter-out -XX:SharedArchiveFile% -Xshare%, $(JAVA_FLAGS))
# Pandoc cannot be used without the jjs plugin, which was removed with Nashorn.
ENABLE_PANDOC := false

View File

@@ -1,7 +1,6 @@
#!/bin/sh
#
# Copyright (c) 2012, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2021, Azul Systems, Inc. All rights reserved.
# Copyright (c) 2012, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -31,17 +30,6 @@
DIR=`dirname $0`
OUT=`. $DIR/autoconf-config.guess`
# Detect C library.
# Use '-gnu' suffix on systems that use glibc.
# Use '-musl' suffix on systems that use the musl libc.
echo $OUT | grep -- -linux- > /dev/null 2> /dev/null
if test $? = 0; then
libc_vendor=`ldd --version 2>&1 | sed -n '1s/.*\(musl\).*/\1/p'`
if [ x"${libc_vendor}" = x"musl" ]; then
OUT=`echo $OUT | sed 's/-gnu/-musl/'`
fi
fi
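To make the libc-suffix rewrite above concrete, a small standalone illustration (not taken from the script); the triplet is just an example value.
# On a musl-based distro the canonical triplet gains a -musl suffix:
echo x86_64-unknown-linux-gnu | sed 's/-gnu/-musl/'
# prints: x86_64-unknown-linux-musl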
# Test and fix cygwin on x86_64
echo $OUT | grep 86-pc-cygwin > /dev/null 2> /dev/null
if test $? != 0; then
@@ -57,11 +45,11 @@ if test $? = 0; then
fi
# Test and fix wsl
echo $OUT | grep unknown-linux-gnu > /dev/null 2> /dev/null
echo $OUT | grep x86_64-unknown-linux-gnu > /dev/null 2> /dev/null
if test $? = 0; then
uname -r | grep -i microsoft > /dev/null 2> /dev/null
if test $? = 0; then
OUT=`echo $OUT | sed -e 's/unknown-linux-gnu/pc-wsl/'`
OUT="x86_64-pc-wsl"
fi
fi
@@ -102,18 +90,6 @@ if [ "x$OUT" = x ]; then
fi
fi
# Test and fix cpu on macos-aarch64, uname -p reports arm, buildsys expects aarch64
echo $OUT | grep arm-apple-darwin > /dev/null 2> /dev/null
if test $? != 0; then
# The GNU version of uname may be on the PATH which reports arm64 instead
echo $OUT | grep arm64-apple-darwin > /dev/null 2> /dev/null
fi
if test $? = 0; then
if [ `uname -m` = arm64 ]; then
OUT=aarch64`echo $OUT | sed -e 's/[^-]*//'`
fi
fi
# Test and fix cpu on Macosx when C preprocessor is not on the path
echo $OUT | grep i386-apple-darwin > /dev/null 2> /dev/null
if test $? = 0; then

View File

@@ -1,6 +1,6 @@
#!/bin/sh
#
# Copyright (c) 2014, 2020, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -29,19 +29,8 @@
DIR=`dirname $0`
if echo $* | grep linux-musl >/dev/null ; then
echo $*
exit
fi
# Allow wsl
if echo $* | grep pc-wsl >/dev/null ; then
echo $*
exit
fi
# Allow msys2
if echo $* | grep pc-msys >/dev/null ; then
if echo $* | grep x86_64-pc-wsl >/dev/null ; then
echo $*
exit
fi
@@ -55,7 +44,7 @@ if ! echo $* | grep '^aarch64-' >/dev/null ; then
fi
while test $# -gt 0 ; do
case $1 in
case $1 in
-- ) # Stop option processing
shift; break ;;
aarch64-* )

View File

@@ -175,7 +175,7 @@ AC_DEFUN([BPERF_SETUP_CCACHE],
if test "x$TOOLCHAIN_PATH" != x; then
PATH=$TOOLCHAIN_PATH:$PATH
fi
UTIL_LOOKUP_PROGS(CCACHE, ccache)
UTIL_PATH_PROGS(CCACHE, ccache)
PATH="$OLD_PATH"
AC_MSG_CHECKING([if ccache is available])

View File

@@ -54,13 +54,11 @@ IMAGES_OUTPUTDIR := $(patsubst $(OUTPUTDIR)%,$(BUILDJDK_OUTPUTDIR)%,$(IMAGES_OUT
OPENJDK_BUILD_CPU_LEGACY := @OPENJDK_BUILD_CPU_LEGACY@
OPENJDK_BUILD_CPU_LEGACY_LIB := @OPENJDK_BUILD_CPU_LEGACY_LIB@
OPENJDK_BUILD_LIBC := @OPENJDK_BUILD_LIBC@
OPENJDK_TARGET_CPU := @OPENJDK_BUILD_CPU@
OPENJDK_TARGET_CPU_ARCH := @OPENJDK_BUILD_CPU_ARCH@
OPENJDK_TARGET_CPU_BITS := @OPENJDK_BUILD_CPU_BITS@
OPENJDK_TARGET_CPU_ENDIAN := @OPENJDK_BUILD_CPU_ENDIAN@
OPENJDK_TARGET_CPU_LEGACY := @OPENJDK_BUILD_CPU_LEGACY@
OPENJDK_TARGET_LIBC := @OPENJDK_BUILD_LIBC@
OPENJDK_TARGET_OS_INCLUDE_SUBDIR := @OPENJDK_BUILD_OS_INCLUDE_SUBDIR@
HOTSPOT_TARGET_OS := @HOTSPOT_BUILD_OS@
@@ -68,7 +66,6 @@ HOTSPOT_TARGET_OS_TYPE := @HOTSPOT_BUILD_OS_TYPE@
HOTSPOT_TARGET_CPU := @HOTSPOT_BUILD_CPU@
HOTSPOT_TARGET_CPU_ARCH := @HOTSPOT_BUILD_CPU_ARCH@
HOTSPOT_TARGET_CPU_DEFINE := @HOTSPOT_BUILD_CPU_DEFINE@
HOTSPOT_TARGET_LIBC := @HOTSPOT_BUILD_LIBC@
CFLAGS_JDKLIB := @OPENJDK_BUILD_CFLAGS_JDKLIB@
CXXFLAGS_JDKLIB := @OPENJDK_BUILD_CXXFLAGS_JDKLIB@

View File

@@ -1,6 +1,6 @@
#!/bin/bash
#
# Copyright (c) 2012, 2020, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -43,7 +43,7 @@ export CMP="@CMP@"
export CP="@CP@"
export CUT="@CUT@"
export DIFF="@DIFF@"
export DUMPBIN="@DUMPBIN@"
export DUMPBIN="@FIXPATH@ @DUMPBIN@"
export EXPR="@EXPR@"
export FILE="@FILE@"
export FIND="@FIND@"
@@ -53,6 +53,7 @@ export LDD="@LDD@"
export LN="@LN@"
export MKDIR="@MKDIR@"
export MV="@MV@"
export NAWK="@NAWK@"
export NM="@GNM@"
export OBJDUMP="@OBJDUMP@"
export OTOOL="@OTOOL@"
@@ -86,10 +87,12 @@ else
fi
if [ "$OPENJDK_TARGET_OS" = "windows" ]; then
if [[ $OPENJDK_BUILD_OS_ENV =~ ^windows.wsl ]]; then
export WSLENV=PATH/l
if [ "$OPENJDK_BUILD_OS_ENV" = "windows.wsl" ]; then
export FIXPATH_PATH="@VS_PATH_WINDOWS@"
export WSLENV="$WSLENV:FIXPATH_PATH:DEBUG_FIXPATH"
else
export PATH="@VS_PATH@"
fi
export PATH="$PATH:@TOOLCHAIN_PATH@"
fi
export HOTSPOT_BUILD_TIME="@HOTSPOT_BUILD_TIME@"
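Since the hunk above leans on WSLENV, here is a brief illustrative sketch of the mechanism; the variable names below are made up and not part of the build.
# WSLENV is a colon-separated list of variable names to forward between WSL and
# Windows processes; a /p suffix translates a single path, /l a path list, and
# no suffix passes the value through unchanged.
export MY_TOOL_HOME=/usr/local/mytool
export MY_TOOL_OPTS="-v -q"
export WSLENV="$WSLENV:MY_TOOL_HOME/p:MY_TOOL_OPTS"
cmd.exe /c set | grep MY_TOOL   # both variables now appear on the Windows side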

View File

@@ -1,6 +1,6 @@
#!/bin/bash
#
# Copyright (c) 2012, 2020, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2012, 2019, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -49,7 +49,7 @@ export LC_ALL=C
if test "x$CUSTOM_CONFIG_DIR" != x; then
custom_hook=$CUSTOM_CONFIG_DIR/custom-hook.m4
if test ! -e $custom_hook; then
echo "CUSTOM_CONFIG_DIR ($CUSTOM_CONFIG_DIR) not pointing to a proper custom config dir."
echo "CUSTOM_CONFIG_DIR not pointing to a proper custom config dir."
echo "Error: Cannot continue" 1>&2
exit 1
fi
@@ -78,12 +78,11 @@ generated_script="$build_support_dir/generated-configure.sh"
###
autoconf_missing_help() {
APT_GET="`type -p apt-get 2> /dev/null`"
YUM="`type -p yum 2> /dev/null`"
BREW="`type -p brew 2> /dev/null`"
ZYPPER="`type -p zypper 2> /dev/null`"
CYGWIN="`type -p cygpath 2> /dev/null`"
UNAMEOUT="`uname 2> /dev/null`"
APT_GET="`which apt-get 2> /dev/null | grep -v '^no apt-get in'`"
YUM="`which yum 2> /dev/null | grep -v '^no yum in'`"
BREW="`which brew 2> /dev/null | grep -v '^no brew in'`"
ZYPPER="`which zypper 2> /dev/null | grep -v '^no zypper in'`"
CYGWIN="`which cygpath 2> /dev/null | grep -v '^no cygpath in'`"
if test "x$ZYPPER" != x; then
PKGHANDLER_COMMAND="sudo zypper install autoconf"
@@ -93,8 +92,6 @@ autoconf_missing_help() {
PKGHANDLER_COMMAND="sudo yum install autoconf"
elif test "x$BREW" != x; then
PKGHANDLER_COMMAND="brew install autoconf"
elif test "x$UNAMEOUT" == xAIX; then
echo "You might be able to fix this by installing autoconf from the 'AIX Toolbox for Linux Applications' (or compile it from the sources)."
elif test "x$CYGWIN" != x; then
PKGHANDLER_COMMAND="( cd <location of cygwin setup.exe> && cmd /c setup -q -P autoconf )"
fi
@@ -114,10 +111,10 @@ generate_configure_script() {
exit 1
fi
else
AUTOCONF="`type -p autoconf 2> /dev/null`"
AUTOCONF="`which autoconf 2> /dev/null | grep -v '^no autoconf in'`"
if test "x$AUTOCONF" = x; then
echo
echo "Autoconf is not found on the PATH ($PATH), and AUTOCONF is not set."
echo "Autoconf is not found on the PATH, and AUTOCONF is not set."
echo "You need autoconf to be able to generate a runnable configure script."
autoconf_missing_help
echo "Error: Cannot find autoconf" 1>&2

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -106,8 +106,6 @@ BASIC_SETUP_DEVKIT
# To properly create a configuration name, we need to have the OpenJDK target
# and options (variants and debug level) parsed.
BASIC_SETUP_OUTPUT_DIR
# After we have the output dir we can finalize the fixpath wrapper
BASIC_WINDOWS_FINALIZE_FIXPATH
# Must be done before we can call HELP_MSG_MISSING_DEPENDENCY.
HELP_SETUP_DEPENDENCY_HELP
@@ -144,7 +142,6 @@ JDKVER_SETUP_JDK_VERSION_NUMBERS
BOOTJDK_SETUP_BOOT_JDK
BOOTJDK_SETUP_BUILD_JDK
BOOTJDK_SETUP_DOCS_REFERENCE_JDK
###############################################################################
#
@@ -206,8 +203,7 @@ FLAGS_POST_TOOLCHAIN
PLATFORM_SETUP_OPENJDK_TARGET_BITS
PLATFORM_SETUP_OPENJDK_TARGET_ENDIANNESS
# Configure flags for the tools. Need to know if we should build reproducible.
JDKOPT_SETUP_REPRODUCIBLE_BUILD
# Configure flags for the tools
FLAGS_SETUP_FLAGS
# Setup debug symbols (need objcopy from the toolchain for that)
@@ -223,6 +219,9 @@ JDKOPT_SETUP_ADDRESS_SANITIZER
#
###############################################################################
# After we have toolchain, we can compile fixpath. It's needed by the lib checks.
BASIC_COMPILE_FIXPATH
LIB_DETERMINE_DEPENDENCIES
LIB_SETUP_LIBRARIES
@@ -243,12 +242,13 @@ HOTSPOT_SETUP_MISC
#
###############################################################################
JDKOPT_DETECT_INTREE_EC
JDKOPT_ENABLE_DISABLE_FAILURE_HANDLER
JDKOPT_ENABLE_DISABLE_GENERATE_CLASSLIST
JDKOPT_EXCLUDE_TRANSLATIONS
JDKOPT_ENABLE_DISABLE_MANPAGES
JDKOPT_ENABLE_DISABLE_CDS_ARCHIVE
JDKOPT_ENABLE_DISABLE_COMPATIBLE_CDS_ALIGNMENT
JDKOPT_SETUP_REPRODUCIBLE_BUILD
###############################################################################
#
@@ -295,7 +295,6 @@ BASIC_CHECK_LEFTOVER_OVERRIDDEN
CONFIG_STATUS="$CONFIGURESUPPORT_OUTPUTDIR/config.status"
# Create the actual output files. Now the main work of configure is done.
BASIC_WINDOWS_FINALIZE
AC_OUTPUT
# After AC_OUTPUT, we need to do final work

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -130,20 +130,14 @@ AC_DEFUN([FLAGS_SETUP_WARNINGS],
case "${TOOLCHAIN_TYPE}" in
microsoft)
DISABLE_WARNING_PREFIX="-wd"
BUILD_CC_DISABLE_WARNING_PREFIX="-wd"
CFLAGS_WARNINGS_ARE_ERRORS="-WX"
WARNINGS_ENABLE_ALL="-W3"
DISABLED_WARNINGS="4800"
if test "x$TOOLCHAIN_VERSION" = x2017; then
# VS2017 incorrectly triggers this warning for constexpr
DISABLED_WARNINGS+=" 4307"
fi
;;
gcc)
DISABLE_WARNING_PREFIX="-Wno-"
BUILD_CC_DISABLE_WARNING_PREFIX="-Wno-"
CFLAGS_WARNINGS_ARE_ERRORS="-Werror"
# Additional warnings that are not activated by -Wall and -Wextra
@@ -155,6 +149,7 @@ AC_DEFUN([FLAGS_SETUP_WARNINGS],
WARNINGS_ENABLE_ALL_CXXFLAGS="$WARNINGS_ENABLE_ALL_CFLAGS $WARNINGS_ENABLE_ADDITIONAL_CXX"
DISABLED_WARNINGS="unused-parameter unused"
BUILD_CC_DISABLE_WARNING_PREFIX="-Wno-"
;;
clang)
@@ -168,6 +163,11 @@ AC_DEFUN([FLAGS_SETUP_WARNINGS],
DISABLED_WARNINGS="unknown-warning-option unused-parameter unused"
if test "x$OPENJDK_TARGET_OS" = xmacosx; then
# missing-method-return-type triggers in JavaNativeFoundation framework
DISABLED_WARNINGS="$DISABLED_WARNINGS missing-method-return-type"
fi
;;
xlc)
@@ -227,14 +227,8 @@ AC_DEFUN([FLAGS_SETUP_OPTIMIZATION],
# -D_FORTIFY_SOURCE=2 hardening option needs optimization (at least -O1) enabled
# set for lower O-levels -U_FORTIFY_SOURCE to overwrite previous settings
if test "x$OPENJDK_TARGET_OS" = xlinux -a "x$DEBUG_LEVEL" = "xfastdebug"; then
ENABLE_FORTIFY_CFLAGS="-D_FORTIFY_SOURCE=2"
DISABLE_FORTIFY_CFLAGS="-U_FORTIFY_SOURCE"
# ASan doesn't work well with _FORTIFY_SOURCE
# See https://github.com/google/sanitizers/wiki/AddressSanitizer#faq
if test "x$ASAN_ENABLED" = xyes; then
ENABLE_FORTIFY_CFLAGS="${DISABLE_FORTIFY_CFLAGS}"
else
ENABLE_FORTIFY_CFLAGS="-D_FORTIFY_SOURCE=2"
fi
C_O_FLAG_HIGHEST_JVM="${C_O_FLAG_HIGHEST_JVM} ${ENABLE_FORTIFY_CFLAGS}"
C_O_FLAG_HIGHEST="${C_O_FLAG_HIGHEST} ${ENABLE_FORTIFY_CFLAGS}"
C_O_FLAG_HI="${C_O_FLAG_HI} ${ENABLE_FORTIFY_CFLAGS}"
@@ -518,18 +512,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
fi
TOOLCHAIN_CFLAGS_JDK_CONLY="$LANGSTD_CFLAGS $TOOLCHAIN_CFLAGS_JDK_CONLY"
# CXXFLAGS C++ language level for all of JDK, including Hotspot.
if test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang || test "x$TOOLCHAIN_TYPE" = xxlc; then
LANGSTD_CXXFLAGS="-std=c++14"
elif test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
LANGSTD_CXXFLAGS="-std:c++14"
else
AC_MSG_ERROR([Don't know how to enable C++14 for this toolchain])
fi
TOOLCHAIN_CFLAGS_JDK_CXXONLY="$TOOLCHAIN_CFLAGS_JDK_CXXONLY $LANGSTD_CXXFLAGS"
TOOLCHAIN_CFLAGS_JVM="$TOOLCHAIN_CFLAGS_JVM $LANGSTD_CXXFLAGS"
ADLC_LANGSTD_CXXFLAGS="$LANGSTD_CXXFLAGS"
# CFLAGS WARNINGS STUFF
# Set JVM_CFLAGS warning handling
if test "x$TOOLCHAIN_TYPE" = xgcc; then
@@ -560,11 +542,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
fi
fi
OS_CFLAGS="$OS_CFLAGS -DLIBC=$OPENJDK_TARGET_LIBC"
if test "x$OPENJDK_TARGET_LIBC" = xmusl; then
OS_CFLAGS="$OS_CFLAGS -DMUSL_LIBC"
fi
# Where does this really belong??
if test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang; then
PICFLAG="-fPIC"
@@ -600,6 +577,25 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_HELPER],
fi
fi
# Optional POSIX functionality needed by the JVM
#
# Check if clock_gettime is available and in which library. This indicates
# availability of CLOCK_MONOTONIC for hotspot. But we don't need to link, so
# don't let it update LIBS.
save_LIBS="$LIBS"
AC_SEARCH_LIBS(clock_gettime, rt, [HAS_CLOCK_GETTIME=true], [])
if test "x$LIBS" = "x-lrt "; then
CLOCK_GETTIME_IN_LIBRT=true
fi
LIBS="$save_LIBS"
if test "x$HAS_CLOCK_GETTIME" = "xtrue"; then
OS_CFLAGS_JVM="$OS_CFLAGS_JVM -DSUPPORTS_CLOCK_MONOTONIC"
if test "x$CLOCK_GETTIME_IN_LIBRT" = "xtrue"; then
OS_CFLAGS_JVM="$OS_CFLAGS_JVM -DNEEDS_LIBRT"
fi
fi
# Extra flags needed when building optional static versions of certain
# JDK libraries.
STATIC_LIBS_CFLAGS="-DSTATIC_BUILD=1"
@@ -640,17 +636,21 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
$1_DEFINES_CPU_JDK="${$1_DEFINES_CPU_JDK} -DARCH='\"$FLAGS_CPU_LEGACY\"' \
-D$FLAGS_CPU_LEGACY"
if test "x$FLAGS_CPU_BITS" = x64 && test "x$FLAGS_OS" != xaix; then
# xlc on AIX defines _LP64=1 by default and issues a warning if we redefine it.
$1_DEFINES_CPU_JDK="${$1_DEFINES_CPU_JDK} -D_LP64=1"
$1_DEFINES_CPU_JVM="${$1_DEFINES_CPU_JVM} -D_LP64=1"
if test "x$FLAGS_CPU_BITS" = x64; then
# -D_LP64=1 is only set on linux and mac. Setting on windows causes diff in
# unpack200.exe.
if test "x$FLAGS_OS" = xlinux || test "x$FLAGS_OS" = xmacosx; then
$1_DEFINES_CPU_JDK="${$1_DEFINES_CPU_JDK} -D_LP64=1"
fi
if test "x$FLAGS_OS" != xaix; then
# xlc on AIX defines _LP64=1 by default and issues a warning if we redefine it.
$1_DEFINES_CPU_JVM="${$1_DEFINES_CPU_JVM} -D_LP64=1"
fi
fi
# toolchain dependent, per-cpu
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
if test "x$FLAGS_CPU" = xaarch64; then
$1_DEFINES_CPU_JDK="${$1_DEFINES_CPU_JDK} -D_ARM64_ -Darm64"
elif test "x$FLAGS_CPU" = xx86_64; then
if test "x$FLAGS_CPU" = xx86_64; then
$1_DEFINES_CPU_JDK="${$1_DEFINES_CPU_JDK} -D_AMD64_ -Damd64"
else
$1_DEFINES_CPU_JDK="${$1_DEFINES_CPU_JDK} -D_X86_ -Dx86"
@@ -660,21 +660,9 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
# CFLAGS PER CPU
if test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang; then
# COMMON to gcc and clang
AC_MSG_CHECKING([if $1 is x86])
if test "x$FLAGS_CPU" = xx86; then
AC_MSG_RESULT([yes])
AC_MSG_CHECKING([if control flow protection is enabled by additional compiler flags])
if echo "${EXTRA_CFLAGS}${EXTRA_CXXFLAGS}${EXTRA_ASFLAGS}" | ${GREP} -q 'fcf-protection' ; then
# cf-protection requires CMOV and thus i686
$1_CFLAGS_CPU="-march=i686"
AC_MSG_RESULT([yes, forcing ${$1_CFLAGS_CPU}])
else
# Force compatibility with i586 on 32 bit intel platforms.
$1_CFLAGS_CPU="-march=i586"
AC_MSG_RESULT([no, forcing ${$1_CFLAGS_CPU}])
fi
else
AC_MSG_RESULT([no])
# Force compatibility with i586 on 32 bit intel platforms.
$1_CFLAGS_CPU="-march=i586"
fi
fi
@@ -706,6 +694,13 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
$1_CFLAGS_CPU_JDK="${$1_CFLAGS_CPU_JDK} -fno-omit-frame-pointer"
fi
$1_CXXSTD_CXXFLAG="-std=gnu++98"
FLAGS_CXX_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [${$1_CXXSTD_CXXFLAG}],
PREFIX: $3, IF_FALSE: [$1_CXXSTD_CXXFLAG=""])
$1_TOOLCHAIN_CFLAGS_JDK_CXXONLY="${$1_CXXSTD_CXXFLAG}"
$1_TOOLCHAIN_CFLAGS_JVM="${$1_TOOLCHAIN_CFLAGS_JVM} ${$1_CXXSTD_CXXFLAG}"
$2ADLC_CXXFLAG="${$1_CXXSTD_CXXFLAG}"
elif test "x$TOOLCHAIN_TYPE" = xclang; then
if test "x$FLAGS_OS" = xlinux; then
# ppc test not really needed for clang
@@ -737,27 +732,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
$1_TOOLCHAIN_CFLAGS="${$1_GCC6_CFLAGS}"
$1_WARNING_CFLAGS_JVM="-Wno-format-zero-length -Wtype-limits -Wuninitialized"
elif test "x$TOOLCHAIN_TYPE" = xclang; then
NO_DELETE_NULL_POINTER_CHECKS_CFLAG="-fno-delete-null-pointer-checks"
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [$NO_DELETE_NULL_POINTER_CHECKS_CFLAG],
PREFIX: $3,
IF_FALSE: [
NO_DELETE_NULL_POINTER_CHECKS_CFLAG=
]
)
$1_TOOLCHAIN_CFLAGS="${NO_DELETE_NULL_POINTER_CHECKS_CFLAG}"
fi
if test "x$TOOLCHAIN_TYPE" = xmicrosoft && test "x$ENABLE_REPRODUCIBLE_BUILD" = xtrue; then
# Enabling deterministic creates warnings if __DATE__ or __TIME__ are
# used, and since we are, silence that warning.
REPRODUCIBLE_CFLAGS="-experimental:deterministic -wd5048"
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [${REPRODUCIBLE_CFLAGS}],
PREFIX: $3,
IF_FALSE: [
REPRODUCIBLE_CFLAGS=
]
)
fi
# Prevent the __FILE__ macro from generating absolute paths into the built
@@ -778,29 +752,6 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
FILE_MACRO_CFLAGS=
]
)
elif test "x$TOOLCHAIN_TYPE" = xmicrosoft &&
test "x$ENABLE_REPRODUCIBLE_BUILD" = xtrue; then
# There is a known issue with the pathmap if the mapping is made to the
# empty string. Add a minimal string "s" as prefix to work around this.
workspace_root_win=`$FIXPATH_BASE print "${WORKSPACE_ROOT%/}"`
# PATHMAP_FLAGS is also added to LDFLAGS in flags-ldflags.m4.
PATHMAP_FLAGS="-pathmap:${workspace_root_win//\//\\\\}=s \
-pathmap:${workspace_root_win}=s"
FILE_MACRO_CFLAGS="$PATHMAP_FLAGS"
FLAGS_COMPILER_CHECK_ARGUMENTS(ARGUMENT: [${FILE_MACRO_CFLAGS}],
PREFIX: $3,
IF_FALSE: [
PATHMAP_FLAGS=
FILE_MACRO_CFLAGS=
]
)
fi
AC_MSG_CHECKING([how to prevent absolute paths in output])
if test "x$FILE_MACRO_CFLAGS" != x; then
AC_MSG_RESULT([using compiler options])
else
AC_MSG_RESULT([using relative paths])
fi
fi
AC_SUBST(FILE_MACRO_CFLAGS)
@@ -809,13 +760,12 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
CFLAGS_JVM_COMMON="$ALWAYS_CFLAGS_JVM $ALWAYS_DEFINES_JVM \
$TOOLCHAIN_CFLAGS_JVM ${$1_TOOLCHAIN_CFLAGS_JVM} \
$OS_CFLAGS $OS_CFLAGS_JVM $CFLAGS_OS_DEF_JVM $DEBUG_CFLAGS_JVM \
$WARNING_CFLAGS $WARNING_CFLAGS_JVM $JVM_PICFLAG $FILE_MACRO_CFLAGS \
$REPRODUCIBLE_CFLAGS"
$WARNING_CFLAGS $WARNING_CFLAGS_JVM $JVM_PICFLAG $FILE_MACRO_CFLAGS"
CFLAGS_JDK_COMMON="$ALWAYS_CFLAGS_JDK $ALWAYS_DEFINES_JDK $TOOLCHAIN_CFLAGS_JDK \
$OS_CFLAGS $CFLAGS_OS_DEF_JDK $DEBUG_CFLAGS_JDK $DEBUG_OPTIONS_FLAGS_JDK \
$WARNING_CFLAGS $WARNING_CFLAGS_JDK $DEBUG_SYMBOLS_CFLAGS_JDK \
$FILE_MACRO_CFLAGS $REPRODUCIBLE_CFLAGS"
$FILE_MACRO_CFLAGS"
# Use ${$2EXTRA_CFLAGS} to block EXTRA_CFLAGS to be added to build flags.
# (Currently we don't have any OPENJDK_BUILD_EXTRA_CFLAGS, but that might
@@ -845,7 +795,7 @@ AC_DEFUN([FLAGS_SETUP_CFLAGS_CPU_DEP],
AC_SUBST($2CFLAGS_JDKEXE)
AC_SUBST($2CXXFLAGS_JDKLIB)
AC_SUBST($2CXXFLAGS_JDKEXE)
AC_SUBST($2ADLC_LANGSTD_CXXFLAGS)
AC_SUBST($2ADLC_CXXFLAG)
COMPILER_FP_CONTRACT_OFF_FLAG="-ffp-contract=off"
# Check that the compiler supports -ffp-contract=off flag

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -53,7 +53,6 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS],
LDFLAGS_TESTEXE="${TARGET_LDFLAGS_JDK_LIBPATH}"
AC_SUBST(LDFLAGS_TESTEXE)
AC_SUBST(ADLC_LDFLAGS)
])
################################################################################
@@ -66,11 +65,11 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_HELPER],
# Add -z,defs, to forbid undefined symbols in object files.
# add -z,relro (mark relocations read only) for all libs
# add -z,now ("full relro" - more of the Global Offset Table GOT is marked read only)
BASIC_LDFLAGS="-Wl,-z,defs -Wl,-z,relro -Wl,-z,now"
BASIC_LDFLAGS="-Wl,--hash-style=gnu -Wl,-z,defs -Wl,-z,relro -Wl,-z,now"
# Linux : remove unused code+data in link step
if test "x$ENABLE_LINKTIME_GC" = xtrue; then
if test "x$OPENJDK_TARGET_CPU" = xs390x; then
BASIC_LDFLAGS="$BASIC_LDFLAGS -Wl,--gc-sections"
BASIC_LDFLAGS="$BASIC_LDFLAGS -Wl,--gc-sections -Wl,--print-gc-sections"
else
BASIC_LDFLAGS_JDK_ONLY="$BASIC_LDFLAGS_JDK_ONLY -Wl,--gc-sections"
fi
@@ -106,7 +105,7 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_HELPER],
# Assume clang or gcc.
# FIXME: We should really generalize SET_SHARED_LIBRARY_ORIGIN instead.
OS_LDFLAGS_JVM_ONLY="-Wl,-rpath,@loader_path/. -Wl,-rpath,@loader_path/.."
OS_LDFLAGS="-mmacosx-version-min=$MACOSX_VERSION_MIN"
OS_LDFLAGS_JDK_ONLY="-mmacosx-version-min=$MACOSX_VERSION_MIN"
fi
fi
@@ -137,17 +136,9 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_HELPER],
fi
fi
if test "x$ENABLE_REPRODUCIBLE_BUILD" = "xtrue"; then
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
REPRODUCIBLE_LDFLAGS="-experimental:deterministic"
fi
fi
if test "x$ALLOW_ABSOLUTE_PATHS_IN_OUTPUT" = "xfalse"; then
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
BASIC_LDFLAGS="$BASIC_LDFLAGS -pdbaltpath:%_PDB%"
# PATHMAP_FLAGS is setup in flags-cflags.m4.
FILE_MACRO_LDFLAGS="${PATHMAP_FLAGS}"
fi
fi
@@ -172,30 +163,21 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_CPU_DEP],
$1_CPU_LDFLAGS="$ARM_ARCH_TYPE_FLAGS $ARM_FLOAT_TYPE_FLAGS"
fi
# MIPS ABI does not support GNU hash style
if test "x${OPENJDK_$1_CPU}" = xmips ||
test "x${OPENJDK_$1_CPU}" = xmipsel ||
test "x${OPENJDK_$1_CPU}" = xmips64 ||
test "x${OPENJDK_$1_CPU}" = xmips64el; then
$1_CPU_LDFLAGS="${$1_CPU_LDFLAGS} -Wl,--hash-style=sysv"
else
$1_CPU_LDFLAGS="${$1_CPU_LDFLAGS} -Wl,--hash-style=gnu"
fi
elif test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
if test "x${OPENJDK_$1_CPU_BITS}" = "x32"; then
$1_CPU_EXECUTABLE_LDFLAGS="-stack:327680"
elif test "x${OPENJDK_$1_CPU_BITS}" = "x64"; then
$1_CPU_EXECUTABLE_LDFLAGS="-stack:1048576"
fi
if test "x${OPENJDK_$1_CPU}" = "xx86"; then
$1_CPU_LDFLAGS="-safeseh"
# NOTE: Old build added -machine. Probably not needed.
$1_CPU_LDFLAGS_JVM_ONLY="-machine:I386"
$1_CPU_EXECUTABLE_LDFLAGS="-stack:327680"
else
$1_CPU_LDFLAGS_JVM_ONLY="-machine:AMD64"
$1_CPU_EXECUTABLE_LDFLAGS="-stack:1048576"
fi
fi
# JVM_VARIANT_PATH depends on if this is build or target...
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
$1_LDFLAGS_JDK_LIBPATH="-libpath:\$(SUPPORT_OUTPUTDIR)/modules_libs/java.base"
$1_LDFLAGS_JDK_LIBPATH="-libpath:${OUTPUTDIR}/support/modules_libs/java.base"
else
$1_LDFLAGS_JDK_LIBPATH="-L\$(SUPPORT_OUTPUTDIR)/modules_libs/java.base \
-L\$(SUPPORT_OUTPUTDIR)/modules_libs/java.base/${$1_JVM_VARIANT_PATH}"
@@ -203,17 +185,15 @@ AC_DEFUN([FLAGS_SETUP_LDFLAGS_CPU_DEP],
# Export variables according to old definitions, prefix with $2 if present.
LDFLAGS_JDK_COMMON="$BASIC_LDFLAGS $BASIC_LDFLAGS_JDK_ONLY \
$OS_LDFLAGS $DEBUGLEVEL_LDFLAGS_JDK_ONLY ${$2EXTRA_LDFLAGS}"
$OS_LDFLAGS_JDK_ONLY $DEBUGLEVEL_LDFLAGS_JDK_ONLY ${$2EXTRA_LDFLAGS}"
$2LDFLAGS_JDKLIB="$LDFLAGS_JDK_COMMON $BASIC_LDFLAGS_JDK_LIB_ONLY \
${$1_LDFLAGS_JDK_LIBPATH} $SHARED_LIBRARY_FLAGS \
$REPRODUCIBLE_LDFLAGS $FILE_MACRO_LDFLAGS"
${$1_LDFLAGS_JDK_LIBPATH} $SHARED_LIBRARY_FLAGS"
$2LDFLAGS_JDKEXE="$LDFLAGS_JDK_COMMON $EXECUTABLE_LDFLAGS \
${$1_CPU_EXECUTABLE_LDFLAGS} $REPRODUCIBLE_LDFLAGS $FILE_MACRO_LDFLAGS"
${$1_CPU_EXECUTABLE_LDFLAGS}"
$2JVM_LDFLAGS="$BASIC_LDFLAGS $BASIC_LDFLAGS_JVM_ONLY $OS_LDFLAGS $OS_LDFLAGS_JVM_ONLY \
$2JVM_LDFLAGS="$BASIC_LDFLAGS $BASIC_LDFLAGS_JVM_ONLY $OS_LDFLAGS_JVM_ONLY \
$DEBUGLEVEL_LDFLAGS $DEBUGLEVEL_LDFLAGS_JVM_ONLY $BASIC_LDFLAGS_ONLYCXX \
${$1_CPU_LDFLAGS} ${$1_CPU_LDFLAGS_JVM_ONLY} ${$2EXTRA_LDFLAGS} \
$REPRODUCIBLE_LDFLAGS $FILE_MACRO_LDFLAGS"
${$1_CPU_LDFLAGS} ${$1_CPU_LDFLAGS_JVM_ONLY} ${$2EXTRA_LDFLAGS}"
AC_SUBST($2LDFLAGS_JDKLIB)
AC_SUBST($2LDFLAGS_JDKEXE)

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -76,26 +76,8 @@ AC_DEFUN([FLAGS_SETUP_RCFLAGS],
# platform independent
AC_DEFUN([FLAGS_SETUP_ASFLAGS],
[
if test "x$TOOLCHAIN_TYPE" = xgcc || test "x$TOOLCHAIN_TYPE" = xclang; then
# Force preprocessor to run, just to make sure
BASIC_ASFLAGS="-x assembler-with-cpp"
elif test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
BASIC_ASFLAGS="-nologo -c"
fi
AC_SUBST(BASIC_ASFLAGS)
if test "x$OPENJDK_TARGET_OS" = xmacosx; then
JVM_BASIC_ASFLAGS="-mno-omit-leaf-frame-pointer -mstack-alignment=16"
# Fix linker warning.
# Code taken from make/autoconf/flags-cflags.m4 and adapted.
JVM_BASIC_ASFLAGS+="-DMAC_OS_X_VERSION_MIN_REQUIRED=$MACOSX_VERSION_MIN_NODOTS \
-mmacosx-version-min=$MACOSX_VERSION_MIN"
if test -n "$MACOSX_VERSION_MAX"; then
JVM_BASIC_ASFLAGS+="$OS_CFLAGS \
-DMAC_OS_X_VERSION_MAX_ALLOWED=$MACOSX_VERSION_MAX_NODOTS"
fi
JVM_BASIC_ASFLAGS="-x assembler-with-cpp -mno-omit-leaf-frame-pointer -mstack-alignment=16"
fi
])

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -125,25 +125,19 @@ AC_DEFUN([FLAGS_SETUP_MACOSX_VERSION],
[
# Additional macosx handling
if test "x$OPENJDK_TARGET_OS" = xmacosx; then
# The expected format for <version> is either nn.n.n or nn.nn.nn. See
# /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/AvailabilityVersions.h
# MACOSX_VERSION_MIN specifies the lowest version of Macosx that the built
# binaries should be compatible with, even if compiled on a newer version
# of the OS. It currently has a hard coded value. Setting this also limits
# exposure to API changes in header files. Bumping this is likely to
# require code changes to build.
if test "x$OPENJDK_TARGET_CPU_ARCH" = xaarch64; then
MACOSX_VERSION_MIN=11.00.00
else
MACOSX_VERSION_MIN=10.12.0
fi
MACOSX_VERSION_MIN=10.9.0
MACOSX_VERSION_MIN_NODOTS=${MACOSX_VERSION_MIN//\./}
AC_SUBST(MACOSX_VERSION_MIN)
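As a rough sketch of where the minimum-version value ends up (the flag spellings appear elsewhere in this patch; the EXAMPLE_* variable names holding them here are illustrative only):
# Illustrative composition; EXAMPLE_* are placeholder names.
EXAMPLE_MACOSX_CFLAGS="-mmacosx-version-min=$MACOSX_VERSION_MIN \
    -DMAC_OS_X_VERSION_MIN_REQUIRED=$MACOSX_VERSION_MIN_NODOTS"
EXAMPLE_MACOSX_LDFLAGS="-mmacosx-version-min=$MACOSX_VERSION_MIN"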
# Setting --with-macosx-version-max=<version> makes it an error to build or
# link to macosx APIs that are newer than the given OS version.
# link to macosx APIs that are newer than the given OS version. The expected
# format for <version> is either nn.n.n or nn.nn.nn. See /usr/include/AvailabilityMacros.h.
AC_ARG_WITH([macosx-version-max], [AS_HELP_STRING([--with-macosx-version-max],
[error on use of newer functionality. @<:@macosx@:>@])],
[
@@ -224,20 +218,10 @@ AC_DEFUN([FLAGS_SETUP_SYSROOT_FLAGS],
# We also need -iframework<path>/System/Library/Frameworks
$1SYSROOT_CFLAGS="[$]$1SYSROOT_CFLAGS -iframework [$]$1SYSROOT/System/Library/Frameworks"
$1SYSROOT_LDFLAGS="[$]$1SYSROOT_LDFLAGS -iframework [$]$1SYSROOT/System/Library/Frameworks"
if test -d "[$]$1SYSROOT/System/Library/Frameworks/JavaVM.framework/Frameworks" ; then
# These always need to be set on macOS 10.X, or we can't find the frameworks embedded in JavaVM.framework
# set this here so it doesn't have to be peppered throughout the forest
$1SYSROOT_CFLAGS="[$]$1SYSROOT_CFLAGS -F [$]$1SYSROOT/System/Library/Frameworks/JavaVM.framework/Frameworks"
$1SYSROOT_LDFLAGS="[$]$1SYSROOT_LDFLAGS -F [$]$1SYSROOT/System/Library/Frameworks/JavaVM.framework/Frameworks"
fi
fi
# For the microsoft toolchain, we need to get the SYSROOT flags from the
# Visual Studio environment. Currently we cannot handle this as a separate
# build toolchain.
if test "x$1" = x && test "x$OPENJDK_BUILD_OS" = "xwindows" \
&& test "x$TOOLCHAIN_TYPE" = "xmicrosoft"; then
TOOLCHAIN_SETUP_VISUAL_STUDIO_ENV
# These always need to be set, or we can't find the frameworks embedded in JavaVM.framework
# set this here so it doesn't have to be peppered throughout the forest
$1SYSROOT_CFLAGS="[$]$1SYSROOT_CFLAGS -F [$]$1SYSROOT/System/Library/Frameworks/JavaVM.framework/Frameworks"
$1SYSROOT_LDFLAGS="[$]$1SYSROOT_LDFLAGS -F [$]$1SYSROOT/System/Library/Frameworks/JavaVM.framework/Frameworks"
fi
AC_SUBST($1SYSROOT_CFLAGS)
@@ -248,7 +232,6 @@ AC_DEFUN_ONCE([FLAGS_PRE_TOOLCHAIN],
[
# We should always include user supplied flags
FLAGS_SETUP_USER_SUPPLIED_FLAGS
# The sysroot flags are needed for configure to be able to run the compilers
FLAGS_SETUP_SYSROOT_FLAGS
@@ -265,14 +248,6 @@ AC_DEFUN_ONCE([FLAGS_PRE_TOOLCHAIN],
fi
fi
if test "x$OPENJDK_TARGET_OS" = xmacosx; then
if test "x$OPENJDK_TARGET_CPU" = xaarch64; then
MACHINE_FLAG="$MACHINE_FLAG -arch arm64"
elif test "x$OPENJDK_TARGET_CPU" = xx86_64; then
MACHINE_FLAG="$MACHINE_FLAG -arch x86_64"
fi
fi
# FIXME: global flags are not used yet...
# The "global" flags will *always* be set. Without them, it is not possible to
# get a working compilation.
@@ -281,6 +256,10 @@ AC_DEFUN_ONCE([FLAGS_PRE_TOOLCHAIN],
GLOBAL_LDFLAGS="$MACHINE_FLAG $SYSROOT_LDFLAGS $USER_LDFLAGS"
# FIXME: Don't really know how to do with this, but this was the old behavior
GLOBAL_CPPFLAGS="$SYSROOT_CFLAGS"
AC_SUBST(GLOBAL_CFLAGS)
AC_SUBST(GLOBAL_CXXFLAGS)
AC_SUBST(GLOBAL_LDFLAGS)
AC_SUBST(GLOBAL_CPPFLAGS)
# FIXME: For compatibility, export this as EXTRA_CFLAGS for now.
EXTRA_CFLAGS="$MACHINE_FLAG $USER_CFLAGS"
@@ -299,14 +278,6 @@ AC_DEFUN_ONCE([FLAGS_PRE_TOOLCHAIN],
CXXFLAGS="$GLOBAL_CXXFLAGS"
LDFLAGS="$GLOBAL_LDFLAGS"
CPPFLAGS="$GLOBAL_CPPFLAGS"
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# When autoconf sends both compiler and linker flags to cl.exe at the same
# time, linker flags must be last at the command line. Achieve this by
# moving them to LIBS.
LIBS="$LIBS -link $LDFLAGS"
LDFLAGS=""
fi
])
AC_DEFUN([FLAGS_SETUP_TOOLCHAIN_CONTROL],
@@ -374,13 +345,15 @@ AC_DEFUN([FLAGS_SETUP_TOOLCHAIN_CONTROL],
# Generate make dependency files
if test "x$TOOLCHAIN_TYPE" = xgcc; then
GENDEPS_FLAGS="-MMD -MF"
C_FLAG_DEPS="-MMD -MF"
elif test "x$TOOLCHAIN_TYPE" = xclang; then
GENDEPS_FLAGS="-MMD -MF"
C_FLAG_DEPS="-MMD -MF"
elif test "x$TOOLCHAIN_TYPE" = xxlc; then
GENDEPS_FLAGS="-qmakedep=gcc -MF"
C_FLAG_DEPS="-qmakedep=gcc -MF"
fi
AC_SUBST(GENDEPS_FLAGS)
CXX_FLAG_DEPS="$C_FLAG_DEPS"
AC_SUBST(C_FLAG_DEPS)
AC_SUBST(CXX_FLAG_DEPS)
])
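A short usage sketch for the dependency-generation flags configured above; the file names are hypothetical.
# With gcc or clang, "-MMD -MF <file>" writes a make-style dependency file as a
# side effect of compilation, which make can include on the next run.
$CC $GENDEPS_FLAGS build/foo.d -c -o build/foo.o src/foo.c
# build/foo.d now contains roughly: build/foo.o: src/foo.c src/foo.h ...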
AC_DEFUN_ONCE([FLAGS_POST_TOOLCHAIN],
@@ -395,6 +368,9 @@ AC_DEFUN_ONCE([FLAGS_POST_TOOLCHAIN],
BUILD_SYSROOT_LDFLAGS="$SYSROOT_LDFLAGS"
fi
fi
AC_SUBST(BUILD_SYSROOT_CFLAGS)
AC_SUBST(BUILD_SYSROOT_LDFLAGS)
])
AC_DEFUN([FLAGS_SETUP_FLAGS],

View File

@@ -25,7 +25,7 @@
AC_DEFUN_ONCE([HELP_SETUP_DEPENDENCY_HELP],
[
UTIL_LOOKUP_PROGS(PKGHANDLER, zypper apt-get yum brew port pkgutil pkgadd pacman)
AC_CHECK_PROGS(PKGHANDLER, zypper apt-get yum brew port pkgutil pkgadd)
])
AC_DEFUN([HELP_MSG_MISSING_DEPENDENCY],
@@ -38,6 +38,8 @@ AC_DEFUN([HELP_MSG_MISSING_DEPENDENCY],
HELP_MSG="OpenJDK distributions are available at http://jdk.java.net/."
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
cygwin_help $MISSING_DEPENDENCY
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
msys_help $MISSING_DEPENDENCY
else
PKGHANDLER_COMMAND=
@@ -56,8 +58,6 @@ AC_DEFUN([HELP_MSG_MISSING_DEPENDENCY],
pkgadd_help $MISSING_DEPENDENCY ;;
zypper)
zypper_help $MISSING_DEPENDENCY ;;
pacman)
pacman_help $MISSING_DEPENDENCY ;;
esac
if test "x$PKGHANDLER_COMMAND" != x; then
@@ -83,6 +83,10 @@ cygwin_help() {
esac
}
msys_help() {
PKGHANDLER_COMMAND=""
}
apt_help() {
case $1 in
reduced)
@@ -97,8 +101,6 @@ apt_help() {
PKGHANDLER_COMMAND="sudo apt-get install libfontconfig1-dev" ;;
freetype)
PKGHANDLER_COMMAND="sudo apt-get install libfreetype6-dev" ;;
harfbuzz)
PKGHANDLER_COMMAND="sudo apt-get install libharfbuzz-dev" ;;
ffi)
PKGHANDLER_COMMAND="sudo apt-get install libffi-dev" ;;
x11)
@@ -122,8 +124,6 @@ zypper_help() {
PKGHANDLER_COMMAND="sudo zypper install fontconfig-devel" ;;
freetype)
PKGHANDLER_COMMAND="sudo zypper install freetype-devel" ;;
harfbuzz)
PKGHANDLER_COMMAND="sudo zypper install harfbuzz-devel" ;;
x11)
PKGHANDLER_COMMAND="sudo zypper install libX11-devel libXext-devel libXrender-devel libXrandr-devel libXtst-devel libXt-devel libXi-devel" ;;
ccache)
@@ -143,8 +143,6 @@ yum_help() {
PKGHANDLER_COMMAND="sudo yum install fontconfig-devel" ;;
freetype)
PKGHANDLER_COMMAND="sudo yum install freetype-devel" ;;
harfbuzz)
PKGHANDLER_COMMAND="sudo yum install harfbuzz-devel" ;;
x11)
PKGHANDLER_COMMAND="sudo yum install libXtst-devel libXt-devel libXrender-devel libXrandr-devel libXi-devel" ;;
ccache)
@@ -161,17 +159,6 @@ brew_help() {
esac
}
pacman_help() {
case $1 in
unzip)
PKGHANDLER_COMMAND="sudo pacman -S unzip" ;;
zip)
PKGHANDLER_COMMAND="sudo pacman -S zip" ;;
make)
PKGHANDLER_COMMAND="sudo pacman -S make" ;;
esac
}
port_help() {
PKGHANDLER_COMMAND=""
}
@@ -245,7 +232,6 @@ AC_DEFUN_ONCE([HELP_PRINT_SUMMARY_AND_WARNINGS],
printf "\n"
printf "Configuration summary:\n"
printf "* Name: $CONF_NAME\n"
printf "* Debug level: $DEBUG_LEVEL\n"
printf "* HS debug level: $HOTSPOT_DEBUG_LEVEL\n"
printf "* JVM variants: $JVM_VARIANTS\n"
@@ -264,13 +250,16 @@ AC_DEFUN_ONCE([HELP_PRINT_SUMMARY_AND_WARNINGS],
printf "\n"
printf "Tools summary:\n"
if test "x$OPENJDK_BUILD_OS" = "xwindows"; then
printf "* Environment: %s version %s; windows version %s; prefix \"%s\"; root \"%s\"\n" \
"$WINENV_VENDOR" "$WINENV_VERSION" "$WINDOWS_VERSION" "$WINENV_PREFIX" "$WINENV_ROOT"
printf "* Environment: $WINDOWS_ENV_VENDOR version $WINDOWS_ENV_VERSION. Windows version $WINDOWS_VERSION"
if test "x$WINDOWS_ENV_ROOT_PATH" != "x"; then
printf ". Root at $WINDOWS_ENV_ROOT_PATH"
fi
printf "\n"
fi
printf "* Boot JDK: $BOOT_JDK_VERSION (at $BOOT_JDK)\n"
printf "* Toolchain: $TOOLCHAIN_TYPE ($TOOLCHAIN_DESCRIPTION)\n"
printf "* C Compiler: Version $CC_VERSION_NUMBER (at ${CC#"$FIXPATH "})\n"
printf "* C++ Compiler: Version $CXX_VERSION_NUMBER (at ${CXX#"$FIXPATH "})\n"
printf "* C Compiler: Version $CC_VERSION_NUMBER (at $CC)\n"
printf "* C++ Compiler: Version $CXX_VERSION_NUMBER (at $CXX)\n"
printf "\n"
printf "Build performance summary:\n"

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -228,6 +228,23 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_JDK_OPTIONS],
])
###############################################################################
#
# Enable or disable the elliptic curve crypto implementation
#
AC_DEFUN_ONCE([JDKOPT_DETECT_INTREE_EC],
[
AC_MSG_CHECKING([if elliptic curve crypto implementation is present])
if test -d "${TOPDIR}/src/jdk.crypto.ec/share/native/libsunec/impl"; then
ENABLE_INTREE_EC=true
AC_MSG_RESULT([yes])
else
ENABLE_INTREE_EC=false
AC_MSG_RESULT([no])
fi
AC_SUBST(ENABLE_INTREE_EC)
])
AC_DEFUN_ONCE([JDKOPT_SETUP_DEBUG_SYMBOLS],
[
@@ -240,7 +257,11 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_DEBUG_SYMBOLS],
[AS_HELP_STRING([--with-native-debug-symbols],
[set the native debug symbol configuration (none, internal, external, zipped) @<:@varying@:>@])],
[
if test "x$OPENJDK_TARGET_OS" = xwindows; then
if test "x$OPENJDK_TARGET_OS" = xaix; then
if test "x$withval" = xexternal || test "x$withval" = xzipped; then
AC_MSG_ERROR([AIX only supports the parameters 'none' and 'internal' for --with-native-debug-symbols])
fi
elif test "x$OPENJDK_TARGET_OS" = xwindows; then
if test "x$withval" = xinternal; then
AC_MSG_ERROR([Windows does not support the parameter 'internal' for --with-native-debug-symbols])
fi
@@ -250,7 +271,12 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_DEBUG_SYMBOLS],
if test "x$STATIC_BUILD" = xtrue; then
with_native_debug_symbols="none"
else
with_native_debug_symbols="external"
if test "x$OPENJDK_TARGET_OS" = xaix; then
# AIX doesn't support 'external' so use 'internal' as default
with_native_debug_symbols="internal"
else
with_native_debug_symbols="external"
fi
fi
])
AC_MSG_RESULT([$with_native_debug_symbols])
@@ -379,9 +405,9 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_CODE_COVERAGE],
UTIL_FIXUP_PATH(JCOV_HOME)
if test "x$with_jcov_input_jdk" != "x" ; then
JCOV_INPUT_JDK="$with_jcov_input_jdk"
if test ! -f "$JCOV_INPUT_JDK/bin/java" && test ! -f "$JCOV_INPUT_JDK/bin/java.exe"; then
if test ! -f "$JCOV_INPUT_JDK/bin/java$EXE_SUFFIX"; then
AC_MSG_RESULT([fail])
AC_MSG_ERROR([Invalid JDK bundle: "$JCOV_INPUT_JDK/bin/java" does not exist])
AC_MSG_ERROR([Invalid JDK bundle: "$JCOV_INPUT_JDK/bin/java$EXE_SUFFIX" does not exist])
fi
UTIL_FIXUP_PATH(JCOV_INPUT_JDK)
fi
@@ -414,10 +440,7 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_ADDRESS_SANITIZER],
fi
],
IF_ENABLED: [
# ASan is simply incompatible with gcc -Wstringop-truncation. See
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85650
# It's harmless to be suppressed in clang as well.
ASAN_CFLAGS="-fsanitize=address -Wno-stringop-truncation -fno-omit-frame-pointer"
ASAN_CFLAGS="-fsanitize=address -fno-omit-frame-pointer"
ASAN_LDFLAGS="-fsanitize=address"
JVM_CFLAGS="$JVM_CFLAGS $ASAN_CFLAGS"
JVM_LDFLAGS="$JVM_LDFLAGS $ASAN_LDFLAGS"
@@ -576,30 +599,6 @@ AC_DEFUN([JDKOPT_ENABLE_DISABLE_CDS_ARCHIVE],
AC_SUBST(BUILD_CDS_ARCHIVE)
])
################################################################################
#
# Enable the alternative CDS core region alignment
#
AC_DEFUN([JDKOPT_ENABLE_DISABLE_COMPATIBLE_CDS_ALIGNMENT],
[
UTIL_ARG_ENABLE(NAME: compatible-cds-alignment, DEFAULT: false,
RESULT: ENABLE_COMPATIBLE_CDS_ALIGNMENT,
DESC: [enable use alternative compatible cds core region alignment],
DEFAULT_DESC: [disabled],
CHECKING_MSG: [if compatible cds region alignment enabled],
CHECK_AVAILABLE: [
AC_MSG_CHECKING([if CDS archive is available])
if test "x$ENABLE_CDS" = "xfalse"; then
AVAILABLE=false
AC_MSG_RESULT([no (CDS is disabled)])
else
AVAILABLE=true
AC_MSG_RESULT([yes])
fi
])
AC_SUBST(ENABLE_COMPATIBLE_CDS_ALIGNMENT)
])
################################################################################
#
# Disallow any output from containing absolute paths from the build system.
@@ -655,7 +654,7 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_REPRODUCIBLE_BUILD],
SOURCE_DATE=$($DATE +"%s")
AC_MSG_RESULT([$SOURCE_DATE, from 'current'])
elif test "x$with_source_date" = xversion; then
# Use the date from version-numbers.conf
# Use the date from version-numbers
UTIL_GET_EPOCH_TIMESTAMP(SOURCE_DATE, $DEFAULT_VERSION_DATE)
if test "x$SOURCE_DATE" = x; then
AC_MSG_RESULT([unavailable])
@@ -679,27 +678,10 @@ AC_DEFUN_ONCE([JDKOPT_SETUP_REPRODUCIBLE_BUILD],
fi
fi
REPRODUCIBLE_BUILD_DEFAULT=$with_source_date_present
if test "x$OPENJDK_BUILD_OS" = xwindows && \
test "x$ALLOW_ABSOLUTE_PATHS_IN_OUTPUT" = xfalse; then
# To support banning absolute paths on Windows, we must use the -pathmap
# method, which requires reproducible builds.
REPRODUCIBLE_BUILD_DEFAULT=true
fi
UTIL_ARG_ENABLE(NAME: reproducible-build, DEFAULT: $REPRODUCIBLE_BUILD_DEFAULT,
UTIL_ARG_ENABLE(NAME: reproducible-build, DEFAULT: $with_source_date_present,
RESULT: ENABLE_REPRODUCIBLE_BUILD,
DESC: [enable reproducible builds (not yet fully functional)],
DEFAULT_DESC: [enabled if --with-source-date is given or on Windows without absolute paths])
if test "x$OPENJDK_BUILD_OS" = xwindows && \
test "x$ALLOW_ABSOLUTE_PATHS_IN_OUTPUT" = xfalse && \
test "x$ENABLE_REPRODUCIBLE_BUILD" = xfalse; then
AC_MSG_NOTICE([On Windows it is not possible to combine --disable-reproducible-builds])
AC_MSG_NOTICE([with --disable-absolute-paths-in-output.])
AC_MSG_ERROR([Cannot continue])
fi
DEFAULT_DESC: [enabled if --with-source-date is given])
AC_SUBST(SOURCE_DATE)
AC_SUBST(ENABLE_REPRODUCIBLE_BUILD)

View File

@@ -58,9 +58,7 @@ AC_DEFUN([JDKVER_CHECK_AND_SET_NUMBER],
AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
[
# Source the version numbers file
. $TOPDIR/make/conf/version-numbers.conf
# Source the branding file
. $TOPDIR/make/conf/branding.conf
. $AUTOCONF_DIR/version-numbers
# Some non-version number information is set in that file
AC_SUBST(LAUNCHER_NAME)
@@ -69,6 +67,34 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
AC_SUBST(JDK_RC_PLATFORM_NAME)
AC_SUBST(HOTSPOT_VM_DISTRO)
# Set the MACOSX Bundle Name base
AC_ARG_WITH(macosx-bundle-name-base, [AS_HELP_STRING([--with-macosx-bundle-name-base],
[Set the MacOSX Bundle Name base. This is the base name for calculating MacOSX Bundle Names.
@<:@not specified@:>@])])
if test "x$with_macosx_bundle_name_base" = xyes; then
AC_MSG_ERROR([--with-macosx-bundle-name-base must have a value])
elif [ ! [[ $with_macosx_bundle_name_base =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-name-base contains non-printing characters: $with_macosx_bundle_name_base])
elif test "x$with_macosx_bundle_name_base" != x; then
# Set MACOSX_BUNDLE_NAME_BASE to the configured value.
MACOSX_BUNDLE_NAME_BASE="$with_macosx_bundle_name_base"
fi
AC_SUBST(MACOSX_BUNDLE_NAME_BASE)
# Set the MACOSX Bundle ID base
AC_ARG_WITH(macosx-bundle-id-base, [AS_HELP_STRING([--with-macosx-bundle-id-base],
[Set the MacOSX Bundle ID base. This is the base ID for calculating MacOSX Bundle IDs.
@<:@not specified@:>@])])
if test "x$with_macosx_bundle_id_base" = xyes; then
AC_MSG_ERROR([--with-macosx-bundle-id-base must have a value])
elif [ ! [[ $with_macosx_bundle_id_base =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-id-base contains non-printing characters: $with_macosx_bundle_id_base])
elif test "x$with_macosx_bundle_id_base" != x; then
# Set MACOSX_BUNDLE_ID_BASE to the configured value.
MACOSX_BUNDLE_ID_BASE="$with_macosx_bundle_id_base"
fi
AC_SUBST(MACOSX_BUNDLE_ID_BASE)
# Set the JDK RC name
AC_ARG_WITH(jdk-rc-name, [AS_HELP_STRING([--with-jdk-rc-name],
[Set JDK RC name. This is used for FileDescription and ProductName properties
@@ -81,7 +107,7 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
# Set JDK_RC_NAME to a custom value if '--with-jdk-rc-name' was used and is not empty.
JDK_RC_NAME="$with_jdk_rc_name"
else
# Otherwise calculate from "branding.conf" included above.
# Otherwise calculate from "version-numbers" included above.
JDK_RC_NAME="$PRODUCT_NAME $JDK_RC_PLATFORM_NAME"
fi
AC_SUBST(JDK_RC_NAME)
@@ -96,7 +122,7 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
AC_MSG_ERROR([--with-vendor-name contains non-printing characters: $with_vendor_name])
elif test "x$with_vendor_name" != x; then
# Only set COMPANY_NAME if '--with-vendor-name' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
# Otherwise we will use the value from "version-numbers" included above.
COMPANY_NAME="$with_vendor_name"
fi
AC_SUBST(COMPANY_NAME)
@@ -110,7 +136,7 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
AC_MSG_ERROR([--with-vendor-url contains non-printing characters: $with_vendor_url])
elif test "x$with_vendor_url" != x; then
# Only set VENDOR_URL if '--with-vendor-url' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
# Otherwise we will use the value from "version-numbers" included above.
VENDOR_URL="$with_vendor_url"
fi
AC_SUBST(VENDOR_URL)
@@ -124,7 +150,7 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
AC_MSG_ERROR([--with-vendor-bug-url contains non-printing characters: $with_vendor_bug_url])
elif test "x$with_vendor_bug_url" != x; then
# Only set VENDOR_URL_BUG if '--with-vendor-bug-url' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
# Otherwise we will use the value from "version-numbers" included above.
VENDOR_URL_BUG="$with_vendor_bug_url"
fi
AC_SUBST(VENDOR_URL_BUG)
@@ -138,7 +164,7 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
AC_MSG_ERROR([--with-vendor-vm-bug-url contains non-printing characters: $with_vendor_vm_bug_url])
elif test "x$with_vendor_vm_bug_url" != x; then
# Only set VENDOR_URL_VM_BUG if '--with-vendor-vm-bug-url' was used and is not empty.
# Otherwise we will use the value from "branding.conf" included above.
# Otherwise we will use the value from "version-numbers" included above.
VENDOR_URL_VM_BUG="$with_vendor_vm_bug_url"
fi
AC_SUBST(VENDOR_URL_VM_BUG)
@@ -282,7 +308,7 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
fi
else
if test "x$NO_DEFAULT_VERSION_PARTS" != xtrue; then
# Default is to get value from version-numbers.conf
# Default is to get value from version-numbers
VERSION_FEATURE="$DEFAULT_VERSION_FEATURE"
fi
fi
@@ -476,64 +502,9 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
VENDOR_VERSION_STRING="$with_vendor_version_string"
fi
# Set the MACOSX Bundle Name base
AC_ARG_WITH(macosx-bundle-name-base, [AS_HELP_STRING([--with-macosx-bundle-name-base],
[Set the MacOSX Bundle Name base. This is the base name for calculating MacOSX Bundle Names.
@<:@not specified@:>@])])
if test "x$with_macosx_bundle_name_base" = xyes; then
AC_MSG_ERROR([--with-macosx-bundle-name-base must have a value])
elif [ ! [[ $with_macosx_bundle_name_base =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-name-base contains non-printing characters: $with_macosx_bundle_name_base])
elif test "x$with_macosx_bundle_name_base" != x; then
# Set MACOSX_BUNDLE_NAME_BASE to the configured value.
MACOSX_BUNDLE_NAME_BASE="$with_macosx_bundle_name_base"
fi
AC_SUBST(MACOSX_BUNDLE_NAME_BASE)
# Set the MACOSX Bundle ID base
AC_ARG_WITH(macosx-bundle-id-base, [AS_HELP_STRING([--with-macosx-bundle-id-base],
[Set the MacOSX Bundle ID base. This is the base ID for calculating MacOSX Bundle IDs.
@<:@not specified@:>@])])
if test "x$with_macosx_bundle_id_base" = xyes; then
AC_MSG_ERROR([--with-macosx-bundle-id-base must have a value])
elif [ ! [[ $with_macosx_bundle_id_base =~ ^[[:print:]]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-id-base contains non-printing characters: $with_macosx_bundle_id_base])
elif test "x$with_macosx_bundle_id_base" != x; then
# Set MACOSX_BUNDLE_ID_BASE to the configured value.
MACOSX_BUNDLE_ID_BASE="$with_macosx_bundle_id_base"
else
# If using the default value, append the VERSION_PRE if there is one
# to make it possible to tell official builds apart from developer builds
if test "x$VERSION_PRE" != x; then
MACOSX_BUNDLE_ID_BASE="$MACOSX_BUNDLE_ID_BASE-$VERSION_PRE"
fi
fi
AC_SUBST(MACOSX_BUNDLE_ID_BASE)
# Set the MACOSX CFBundleVersion field
AC_ARG_WITH(macosx-bundle-build-version, [AS_HELP_STRING([--with-macosx-bundle-build-version],
[Set the MacOSX Bundle CFBundleVersion field. This key is a machine-readable
string composed of one to three period-separated integers and should represent the
build version. Defaults to the build number.])])
if test "x$with_macosx_bundle_build_version" = xyes; then
AC_MSG_ERROR([--with-macosx-bundle-build-version must have a value])
elif [ ! [[ $with_macosx_bundle_build_version =~ ^[0-9\.]*$ ]] ]; then
AC_MSG_ERROR([--with-macosx-bundle-build-version contains non numbers and periods: $with_macosx_bundle_build_version])
elif test "x$with_macosx_bundle_build_version" != x; then
MACOSX_BUNDLE_BUILD_VERSION="$with_macosx_bundle_build_version"
else
MACOSX_BUNDLE_BUILD_VERSION="$VERSION_BUILD"
# If VERSION_OPT consists of only numbers and periods, add it.
if [ [[ $VERSION_OPT =~ ^[0-9\.]+$ ]] ]; then
MACOSX_BUNDLE_BUILD_VERSION+=".$VERSION_OPT"
fi
fi
AC_SUBST(MACOSX_BUNDLE_BUILD_VERSION)
# We could define --with flags for these, if really needed
VERSION_CLASSFILE_MAJOR="$DEFAULT_VERSION_CLASSFILE_MAJOR"
VERSION_CLASSFILE_MINOR="$DEFAULT_VERSION_CLASSFILE_MINOR"
VERSION_DOCS_API_SINCE="$DEFAULT_VERSION_DOCS_API_SINCE"
JDK_SOURCE_TARGET_VERSION="$DEFAULT_JDK_SOURCE_TARGET_VERSION"
AC_MSG_CHECKING([for version string])
@@ -558,6 +529,5 @@ AC_DEFUN_ONCE([JDKVER_SETUP_JDK_VERSION_NUMBERS],
AC_SUBST(VENDOR_VERSION_STRING)
AC_SUBST(VERSION_CLASSFILE_MAJOR)
AC_SUBST(VERSION_CLASSFILE_MINOR)
AC_SUBST(VERSION_DOCS_API_SINCE)
AC_SUBST(JDK_SOURCE_TARGET_VERSION)
])

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -44,7 +44,7 @@
m4_define(jvm_features_valid, m4_normalize( \
ifdef([custom_jvm_features_valid], custom_jvm_features_valid) \
\
cds compiler1 compiler2 dtrace epsilongc g1gc jfr jni-check \
aot cds compiler1 compiler2 dtrace epsilongc g1gc graal jfr jni-check \
jvmci jvmti link-time-opt management minimal nmt opt-size parallelgc \
serialgc services shenandoahgc static-build vm-structs zero zgc \
))
@@ -55,12 +55,14 @@ m4_define(jvm_features_deprecated, m4_normalize(
))
# Feature descriptions
m4_define(jvm_feature_desc_aot, [enable ahead of time compilation (AOT)])
m4_define(jvm_feature_desc_cds, [enable class data sharing (CDS)])
m4_define(jvm_feature_desc_compiler1, [enable hotspot compiler C1])
m4_define(jvm_feature_desc_compiler2, [enable hotspot compiler C2])
m4_define(jvm_feature_desc_dtrace, [enable dtrace support])
m4_define(jvm_feature_desc_epsilongc, [include the epsilon (no-op) garbage collector])
m4_define(jvm_feature_desc_g1gc, [include the G1 garbage collector])
m4_define(jvm_feature_desc_graal, [enable Graal (jdk.internal.vm.compiler)])
m4_define(jvm_feature_desc_jfr, [enable JDK Flight Recorder (JFR)])
m4_define(jvm_feature_desc_jni_check, [enable -Xcheck:jni support])
m4_define(jvm_feature_desc_jvmci, [enable JVM Compiler Interface (JVMCI)])
@@ -92,6 +94,7 @@ AC_DEFUN_ONCE([JVM_FEATURES_PARSE_OPTIONS],
# For historical reasons, some jvm features have their own, shorter names.
# Keep those as aliases for the --enable-jvm-feature-* style arguments.
UTIL_ALIASED_ARG_ENABLE(aot, --enable-jvm-feature-aot)
UTIL_ALIASED_ARG_ENABLE(cds, --enable-jvm-feature-cds)
UTIL_ALIASED_ARG_ENABLE(dtrace, --enable-jvm-feature-dtrace)
@@ -156,7 +159,7 @@ AC_DEFUN_ONCE([JVM_FEATURES_PARSE_OPTIONS],
# Likewise, check for deprecated arguments.
m4_foreach(FEATURE, m4_split(jvm_features_deprecated), [
AC_ARG_ENABLE(jvm-feature-FEATURE, AS_HELP_STRING(
[--enable-jvm-feature-FEATURE],
[--enable-jvm-feature-FEATURE],
[Deprecated. Option is kept for backwards compatibility and is ignored]))
m4_define(FEATURE_SHELL, [enable_jvm_feature_]m4_translit(FEATURE, -, _))
@@ -226,6 +229,33 @@ AC_DEFUN([JVM_FEATURES_CHECK_AVAILABILITY],
fi
])
###############################################################################
# Check if the feature 'aot' is available on this platform.
#
AC_DEFUN_ONCE([JVM_FEATURES_CHECK_AOT],
[
JVM_FEATURES_CHECK_AVAILABILITY(aot, [
AC_MSG_CHECKING([if platform is supported by AOT])
# AOT is only available where JVMCI is available since it requires JVMCI.
if test "x$OPENJDK_TARGET_CPU" = "xx86_64" || \
test "x$OPENJDK_TARGET_CPU" = "xaarch64"; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no, $OPENJDK_TARGET_CPU])
AVAILABLE=false
fi
AC_MSG_CHECKING([if AOT source code is present])
if test -e "${TOPDIR}/src/jdk.internal.vm.compiler" && \
test -e "${TOPDIR}/src/jdk.aot"; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no, missing src/jdk.internal.vm.compiler or src/jdk.aot])
AVAILABLE=false
fi
])
])
###############################################################################
# Check if the feature 'cds' is available on this platform.
#
@@ -233,11 +263,11 @@ AC_DEFUN_ONCE([JVM_FEATURES_CHECK_CDS],
[
JVM_FEATURES_CHECK_AVAILABILITY(cds, [
AC_MSG_CHECKING([if platform is supported by CDS])
if test "x$OPENJDK_TARGET_OS" = xaix; then
AC_MSG_RESULT([no, $OPENJDK_TARGET_OS-$OPENJDK_TARGET_CPU])
AVAILABLE=false
else
if test "x$OPENJDK_TARGET_OS" != xaix; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no, $OPENJDK_TARGET_OS])
AVAILABLE=false
fi
])
])
@@ -265,6 +295,24 @@ AC_DEFUN_ONCE([JVM_FEATURES_CHECK_DTRACE],
])
])
###############################################################################
# Check if the feature 'graal' is available on this platform.
#
AC_DEFUN_ONCE([JVM_FEATURES_CHECK_GRAAL],
[
JVM_FEATURES_CHECK_AVAILABILITY(graal, [
AC_MSG_CHECKING([if platform is supported by Graal])
# Graal is only available where JVMCI is available since it requires JVMCI.
if test "x$OPENJDK_TARGET_CPU" = "xx86_64" || \
test "x$OPENJDK_TARGET_CPU" = "xaarch64" ; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no, $OPENJDK_TARGET_CPU])
AVAILABLE=false
fi
])
])
###############################################################################
# Check if the feature 'jfr' is available on this platform.
#
@@ -288,9 +336,8 @@ AC_DEFUN_ONCE([JVM_FEATURES_CHECK_JVMCI],
[
JVM_FEATURES_CHECK_AVAILABILITY(jvmci, [
AC_MSG_CHECKING([if platform is supported by JVMCI])
if test "x$OPENJDK_TARGET_CPU" = "xx86_64"; then
AC_MSG_RESULT([yes])
elif test "x$OPENJDK_TARGET_CPU" = "xaarch64"; then
if test "x$OPENJDK_TARGET_CPU" = "xx86_64" || \
test "x$OPENJDK_TARGET_CPU" = "xaarch64" ; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no, $OPENJDK_TARGET_CPU])
@@ -348,15 +395,8 @@ AC_DEFUN_ONCE([JVM_FEATURES_CHECK_ZGC],
AC_MSG_RESULT([no, $OPENJDK_TARGET_OS-$OPENJDK_TARGET_CPU])
AVAILABLE=false
fi
elif test "x$OPENJDK_TARGET_CPU" = "xaarch64"; then
if test "x$OPENJDK_TARGET_OS" = "xlinux" || \
test "x$OPENJDK_TARGET_OS" = "xwindows" || \
test "x$OPENJDK_TARGET_OS" = "xmacosx"; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no, $OPENJDK_TARGET_OS-$OPENJDK_TARGET_CPU])
AVAILABLE=false
fi
elif test "x$OPENJDK_TARGET_OS-$OPENJDK_TARGET_CPU" = "xlinux-aarch64"; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no, $OPENJDK_TARGET_OS-$OPENJDK_TARGET_CPU])
AVAILABLE=false
@@ -390,8 +430,10 @@ AC_DEFUN_ONCE([JVM_FEATURES_PREPARE_PLATFORM],
# The checks below should add unavailable features to
# JVM_FEATURES_PLATFORM_UNAVAILABLE.
JVM_FEATURES_CHECK_AOT
JVM_FEATURES_CHECK_CDS
JVM_FEATURES_CHECK_DTRACE
JVM_FEATURES_CHECK_GRAAL
JVM_FEATURES_CHECK_JFR
JVM_FEATURES_CHECK_JVMCI
JVM_FEATURES_CHECK_SHENANDOAHGC
@@ -424,18 +466,18 @@ AC_DEFUN([JVM_FEATURES_PREPARE_VARIANT],
elif test "x$variant" = "xcore"; then
JVM_FEATURES_VARIANT_UNAVAILABLE="cds minimal zero"
elif test "x$variant" = "xzero"; then
JVM_FEATURES_VARIANT_UNAVAILABLE="cds compiler1 compiler2 \
jvmci minimal zgc"
JVM_FEATURES_VARIANT_UNAVAILABLE="aot cds compiler1 compiler2 \
epsilongc g1gc graal jvmci minimal shenandoahgc zgc"
else
JVM_FEATURES_VARIANT_UNAVAILABLE="minimal zero"
fi
# Check which features should be off by default for this JVM variant.
if test "x$variant" = "xclient"; then
JVM_FEATURES_VARIANT_FILTER="compiler2 jvmci link-time-opt opt-size"
JVM_FEATURES_VARIANT_FILTER="aot compiler2 graal jvmci link-time-opt opt-size"
elif test "x$variant" = "xminimal"; then
JVM_FEATURES_VARIANT_FILTER="cds compiler2 dtrace epsilongc g1gc \
jfr jni-check jvmci jvmti management nmt parallelgc services \
JVM_FEATURES_VARIANT_FILTER="aot cds compiler2 dtrace epsilongc g1gc \
graal jfr jni-check jvmci jvmti management nmt parallelgc services \
shenandoahgc vm-structs zgc"
if test "x$OPENJDK_TARGET_CPU" = xarm ; then
JVM_FEATURES_VARIANT_FILTER="$JVM_FEATURES_VARIANT_FILTER opt-size"
@@ -445,7 +487,7 @@ AC_DEFUN([JVM_FEATURES_PREPARE_VARIANT],
link-time-opt"
fi
elif test "x$variant" = "xcore"; then
JVM_FEATURES_VARIANT_FILTER="compiler1 compiler2 jvmci \
JVM_FEATURES_VARIANT_FILTER="aot compiler1 compiler2 graal jvmci \
link-time-opt opt-size"
elif test "x$variant" = "xzero"; then
JVM_FEATURES_VARIANT_FILTER="jfr link-time-opt opt-size"
@@ -521,6 +563,15 @@ AC_DEFUN([JVM_FEATURES_VERIFY],
[
variant=$1
# Verify that dependencies are met for inter-feature relations.
if JVM_FEATURES_IS_ACTIVE(aot) && ! JVM_FEATURES_IS_ACTIVE(graal); then
AC_MSG_ERROR([Specified JVM feature 'aot' requires feature 'graal' for variant '$variant'])
fi
if JVM_FEATURES_IS_ACTIVE(graal) && ! JVM_FEATURES_IS_ACTIVE(jvmci); then
AC_MSG_ERROR([Specified JVM feature 'graal' requires feature 'jvmci' for variant '$variant'])
fi
if JVM_FEATURES_IS_ACTIVE(jvmci) && ! (JVM_FEATURES_IS_ACTIVE(compiler1) || \
JVM_FEATURES_IS_ACTIVE(compiler2)); then
AC_MSG_ERROR([Specified JVM feature 'jvmci' requires feature 'compiler2' or 'compiler1' for variant '$variant'])
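For illustration, the dependency chain enforced above (aot needs graal, graal needs jvmci, jvmci needs a compiler) translates into configure invocations like the sketch below; the exact command line is an example, not mandated by this patch.
# Sketch: keep the whole chain enabled when asking for AOT explicitly.
bash configure --enable-jvm-feature-aot \
    --enable-jvm-feature-graal \
    --enable-jvm-feature-jvmci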
@@ -536,15 +587,18 @@ AC_DEFUN([JVM_FEATURES_VERIFY],
# For backwards compatibility, disable a feature "globally" if one variant
# is missing the feature.
if ! JVM_FEATURES_IS_ACTIVE(aot); then
ENABLE_AOT="false"
fi
if ! JVM_FEATURES_IS_ACTIVE(cds); then
ENABLE_CDS="false"
fi
if ! JVM_FEATURES_IS_ACTIVE(graal); then
INCLUDE_GRAAL="false"
fi
if ! JVM_FEATURES_IS_ACTIVE(jvmci); then
INCLUDE_JVMCI="false"
fi
if JVM_FEATURES_IS_ACTIVE(compiler2); then
INCLUDE_COMPILER2="true"
fi
# Verify that we have at least one gc selected (i.e., feature named "*gc").
if ! JVM_FEATURES_IS_ACTIVE(.*gc); then
@@ -566,9 +620,10 @@ AC_DEFUN_ONCE([JVM_FEATURES_SETUP],
# For backwards compatibility, tentatively enable these features "globally",
# and disable them in JVM_FEATURES_VERIFY if a variant is found that is
# missing any of them.
ENABLE_AOT="true"
ENABLE_CDS="true"
INCLUDE_GRAAL="true"
INCLUDE_JVMCI="true"
INCLUDE_COMPILER2="false"
for variant in $JVM_VARIANTS; do
# Figure out if any features are unavailable, or should be filtered out
@@ -604,7 +659,8 @@ AC_DEFUN_ONCE([JVM_FEATURES_SETUP],
AC_SUBST(JVM_FEATURES_zero)
AC_SUBST(JVM_FEATURES_custom)
AC_SUBST(ENABLE_AOT)
AC_SUBST(INCLUDE_GRAAL)
AC_SUBST(INCLUDE_JVMCI)
AC_SUBST(INCLUDE_COMPILER2)
])

View File

@@ -40,7 +40,6 @@ AC_DEFUN_ONCE([LIB_SETUP_BUNDLED_LIBS],
LIB_SETUP_LIBPNG
LIB_SETUP_ZLIB
LIB_SETUP_LCMS
LIB_SETUP_HARFBUZZ
])
################################################################################
@@ -264,43 +263,3 @@ AC_DEFUN_ONCE([LIB_SETUP_LCMS],
AC_SUBST(LCMS_CFLAGS)
AC_SUBST(LCMS_LIBS)
])
################################################################################
# Setup harfbuzz
################################################################################
AC_DEFUN_ONCE([LIB_SETUP_HARFBUZZ],
[
AC_ARG_WITH(harfbuzz, [AS_HELP_STRING([--with-harfbuzz],
[use harfbuzz from build system or OpenJDK source (system, bundled) @<:@bundled@:>@])])
AC_MSG_CHECKING([for which harfbuzz to use])
DEFAULT_HARFBUZZ=bundled
# If user didn't specify, use DEFAULT_HARFBUZZ
if test "x${with_harfbuzz}" = "x"; then
with_harfbuzz=${DEFAULT_HARFBUZZ}
fi
if test "x${with_harfbuzz}" = "xbundled"; then
USE_EXTERNAL_HARFBUZZ=false
HARFBUZZ_CFLAGS=""
HARFBUZZ_LIBS=""
AC_MSG_RESULT([bundled])
elif test "x${with_harfbuzz}" = "xsystem"; then
AC_MSG_RESULT([system])
PKG_CHECK_MODULES([HARFBUZZ], [harfbuzz], [HARFBUZZ_FOUND=yes], [HARFBUZZ_FOUND=no])
if test "x${HARFBUZZ_FOUND}" = "xyes"; then
# PKG_CHECK_MODULES will set HARFBUZZ_CFLAGS and HARFBUZZ_LIBS
USE_EXTERNAL_HARFBUZZ=true
else
HELP_MSG_MISSING_DEPENDENCY([harfbuzz])
AC_MSG_ERROR([--with-harfbuzz=system specified, but no harfbuzz found! $HELP_MSG])
fi
else
AC_MSG_ERROR([Invalid value for --with-harfbuzz: ${with_harfbuzz}, use 'system' or 'bundled'])
fi
AC_SUBST(USE_EXTERNAL_HARFBUZZ)
AC_SUBST(HARFBUZZ_CFLAGS)
AC_SUBST(HARFBUZZ_LIBS)
])
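A short usage sketch for the --with-harfbuzz option handled above; the pkg-config invocation only illustrates where HARFBUZZ_CFLAGS and HARFBUZZ_LIBS come from in the 'system' case.

  # Only 'bundled' (the default) and 'system' are accepted:
  bash configure --with-harfbuzz=system
  # PKG_CHECK_MODULES([HARFBUZZ], [harfbuzz]) resolves the flags roughly as:
  pkg-config --cflags --libs harfbuzz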

View File

@@ -192,16 +192,6 @@ AC_DEFUN_ONCE([LIB_SETUP_FREETYPE],
[$FREETYPE_BASE_DIR/lib], [well-known location])
fi
if test "x$FOUND_FREETYPE" != "xyes" ; then
LIB_CHECK_POTENTIAL_FREETYPE([$FREETYPE_BASE_DIR/include],
[$FREETYPE_BASE_DIR/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI], [well-known location])
fi
if test "x$FOUND_FREETYPE" != "xyes" ; then
LIB_CHECK_POTENTIAL_FREETYPE([$FREETYPE_BASE_DIR/include],
[$FREETYPE_BASE_DIR/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI], [well-known location])
fi
if test "x$FOUND_FREETYPE" != "xyes" ; then
FREETYPE_BASE_DIR="$SYSROOT/usr/X11"
LIB_CHECK_POTENTIAL_FREETYPE([$FREETYPE_BASE_DIR/include],

View File

@@ -70,7 +70,6 @@ AC_DEFUN_ONCE([LIB_SETUP_STD_LIBS],
else
LIBCXX="$LIBCXX $STATIC_STDCXX_FLAGS"
JVM_LDFLAGS="$JVM_LDFLAGS $STATIC_STDCXX_FLAGS"
ADLC_LDFLAGS="$ADLC_LDFLAGS $STATIC_STDCXX_FLAGS"
# Ideally, we should test stdc++ for the BUILD toolchain separately. For now
# just use the same setting as for the TARGET toolchain.
OPENJDK_BUILD_JVM_LDFLAGS="$OPENJDK_BUILD_JVM_LDFLAGS $STATIC_STDCXX_FLAGS"

View File

@@ -23,6 +23,38 @@
# questions.
#
###############################################################################
#
# Check for graalunit libs, needed for running graalunit tests.
#
AC_DEFUN_ONCE([LIB_TESTS_SETUP_GRAALUNIT],
[
AC_ARG_WITH(graalunit-lib, [AS_HELP_STRING([--with-graalunit-lib],
[specify location of 3rd party libraries used by Graal unit tests])])
GRAALUNIT_LIB=
if test "x${with_graalunit_lib}" != x; then
AC_MSG_CHECKING([for graalunit libs])
if test "x${with_graalunit_lib}" = xno; then
AC_MSG_RESULT([disabled, graalunit tests can not be run])
elif test "x${with_graalunit_lib}" = xyes; then
AC_MSG_RESULT([not specified])
AC_MSG_ERROR([You must specify the path to 3rd party libraries used by Graal unit tests])
else
GRAALUNIT_LIB="${with_graalunit_lib}"
if test ! -d "${GRAALUNIT_LIB}"; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([Could not find graalunit 3rd party libraries as specified. (${with_graalunit_lib})])
else
AC_MSG_RESULT([$GRAALUNIT_LIB])
fi
fi
fi
UTIL_FIXUP_PATH([GRAALUNIT_LIB])
AC_SUBST(GRAALUNIT_LIB)
])
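A small usage sketch for --with-graalunit-lib as checked above; the directory is just a placeholder.

  # Must point at an existing directory holding the 3rd party test libraries:
  bash configure --with-graalunit-lib=/opt/graalunit-libs
  # '--with-graalunit-lib=yes' (no path) or a missing directory is rejected; the
  # accepted value is exported to the build as GRAALUNIT_LIB (@GRAALUNIT_LIB@ in spec.gmk).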
###############################################################################
#
# Setup and check for gtest framework source files

View File

@@ -68,10 +68,6 @@ AC_DEFUN_ONCE([LIB_SETUP_X11],
x_libraries="$SYSROOT/usr/lib64"
elif test -f "$SYSROOT/usr/lib/libX11.so"; then
x_libraries="$SYSROOT/usr/lib"
elif test -f "$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"; then
x_libraries="$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"
elif test -f "$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"; then
x_libraries="$SYSROOT/usr/lib/$OPENJDK_TARGET_CPU_AUTOCONF-$OPENJDK_TARGET_OS-$OPENJDK_TARGET_ABI/libX11.so"
fi
fi
fi

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -43,11 +43,9 @@ AC_DEFUN_ONCE([LIB_DETERMINE_DEPENDENCIES],
if test "x$OPENJDK_TARGET_OS" = xwindows || test "x$OPENJDK_TARGET_OS" = xmacosx; then
# No X11 support on windows or macosx
NEEDS_LIB_X11=false
elif test "x$ENABLE_HEADLESS_ONLY" = xtrue; then
# No X11 support needed when building headless only
NEEDS_LIB_X11=false
else
# All other instances need X11
# All other instances need X11; even if building headless only, libawt still
# needs X11 headers.
NEEDS_LIB_X11=true
fi
@@ -103,6 +101,7 @@ AC_DEFUN_ONCE([LIB_SETUP_LIBRARIES],
LIB_SETUP_LIBFFI
LIB_SETUP_BUNDLED_LIBS
LIB_SETUP_MISC_LIBS
LIB_TESTS_SETUP_GRAALUNIT
LIB_TESTS_SETUP_GTEST
BASIC_JDKLIB_LIBS=""
@@ -123,29 +122,6 @@ AC_DEFUN_ONCE([LIB_SETUP_LIBRARIES],
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -lpthread"
fi
# librt for legacy clock_gettime
if test "x$OPENJDK_TARGET_OS" = xlinux; then
# Hotspot needs to link librt to get the clock_* functions.
# But once our supported minimum build and runtime platform
# has glibc 2.17, this can be removed as the functions are
# in libc.
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -lrt"
fi
# Atomic library
# 32-bit platforms need a fallback library for 8-byte atomic ops on Zero
if HOTSPOT_CHECK_JVM_VARIANT(zero); then
if test "x$OPENJDK_TARGET_OS" = xlinux &&
(test "x$OPENJDK_TARGET_CPU" = xarm ||
test "x$OPENJDK_TARGET_CPU" = xm68k ||
test "x$OPENJDK_TARGET_CPU" = xmips ||
test "x$OPENJDK_TARGET_CPU" = xmipsel ||
test "x$OPENJDK_TARGET_CPU" = xppc ||
test "x$OPENJDK_TARGET_CPU" = xsh); then
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -latomic"
fi
fi
# perfstat lib
if test "x$OPENJDK_TARGET_OS" = xaix; then
BASIC_JVM_LIBS="$BASIC_JVM_LIBS -lperfstat"

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -206,9 +206,9 @@ AC_DEFUN([PLATFORM_EXTRACT_VARS_FROM_OS],
VAR_OS=windows
VAR_OS_ENV=windows.wsl
;;
*msys*)
*mingw*)
VAR_OS=windows
VAR_OS_ENV=windows.msys2
VAR_OS_ENV=windows.msys
;;
*aix*)
VAR_OS=aix
@@ -220,51 +220,6 @@ AC_DEFUN([PLATFORM_EXTRACT_VARS_FROM_OS],
esac
])
# Support macro for PLATFORM_EXTRACT_TARGET_AND_BUILD.
# Converts autoconf style OS name to OpenJDK style, into
# VAR_LIBC.
AC_DEFUN([PLATFORM_EXTRACT_VARS_FROM_LIBC],
[
case "$1" in
*linux*-musl)
VAR_LIBC=musl
;;
*linux*-gnu)
VAR_LIBC=gnu
;;
*)
VAR_LIBC=default
;;
esac
])
# Support macro for PLATFORM_EXTRACT_TARGET_AND_BUILD.
# Converts autoconf style OS name to OpenJDK style, into
# VAR_ABI.
AC_DEFUN([PLATFORM_EXTRACT_VARS_FROM_ABI],
[
case "$1" in
*linux*-musl)
VAR_ABI=musl
;;
*linux*-gnu)
VAR_ABI=gnu
;;
*linux*-gnueabi)
VAR_ABI=gnueabi
;;
*linux*-gnueabihf)
VAR_ABI=gnueabihf
;;
*linux*-gnuabi64)
VAR_ABI=gnuabi64
;;
*)
VAR_ABI=default
;;
esac
])
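Worked by hand against the case patterns above, a few autoconf triplets and the values they map to. Note that the '*linux*-gnu' pattern only matches triplets that literally end in -gnu, so the gnueabihf triplet falls through to the default libc.

  #   x86_64-pc-linux-gnu           ->  VAR_LIBC=gnu      VAR_ABI=gnu
  #   x86_64-alpine-linux-musl      ->  VAR_LIBC=musl     VAR_ABI=musl
  #   arm-unknown-linux-gnueabihf   ->  VAR_LIBC=default  VAR_ABI=gnueabihf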
# Expects $host_os $host_cpu $build_os and $build_cpu
# and $with_target_bits to have been setup!
#
@@ -282,11 +237,9 @@ AC_DEFUN([PLATFORM_EXTRACT_TARGET_AND_BUILD],
AC_SUBST(OPENJDK_TARGET_AUTOCONF_NAME)
AC_SUBST(OPENJDK_BUILD_AUTOCONF_NAME)
# Convert the autoconf OS/CPU value to our own data, into the VAR_OS/CPU/LIBC variables.
# Convert the autoconf OS/CPU value to our own data, into the VAR_OS/CPU variables.
PLATFORM_EXTRACT_VARS_FROM_OS($build_os)
PLATFORM_EXTRACT_VARS_FROM_CPU($build_cpu)
PLATFORM_EXTRACT_VARS_FROM_LIBC($build_os)
PLATFORM_EXTRACT_VARS_FROM_ABI($build_os)
# ..and setup our own variables. (Do this explicitly to facilitate searching)
OPENJDK_BUILD_OS="$VAR_OS"
if test "x$VAR_OS_TYPE" != x; then
@@ -303,9 +256,6 @@ AC_DEFUN([PLATFORM_EXTRACT_TARGET_AND_BUILD],
OPENJDK_BUILD_CPU_ARCH="$VAR_CPU_ARCH"
OPENJDK_BUILD_CPU_BITS="$VAR_CPU_BITS"
OPENJDK_BUILD_CPU_ENDIAN="$VAR_CPU_ENDIAN"
OPENJDK_BUILD_CPU_AUTOCONF="$build_cpu"
OPENJDK_BUILD_LIBC="$VAR_LIBC"
OPENJDK_BUILD_ABI="$VAR_ABI"
AC_SUBST(OPENJDK_BUILD_OS)
AC_SUBST(OPENJDK_BUILD_OS_TYPE)
AC_SUBST(OPENJDK_BUILD_OS_ENV)
@@ -313,23 +263,13 @@ AC_DEFUN([PLATFORM_EXTRACT_TARGET_AND_BUILD],
AC_SUBST(OPENJDK_BUILD_CPU_ARCH)
AC_SUBST(OPENJDK_BUILD_CPU_BITS)
AC_SUBST(OPENJDK_BUILD_CPU_ENDIAN)
AC_SUBST(OPENJDK_BUILD_CPU_AUTOCONF)
AC_SUBST(OPENJDK_BUILD_LIBC)
AC_SUBST(OPENJDK_BUILD_ABI)
AC_MSG_CHECKING([openjdk-build os-cpu])
AC_MSG_RESULT([$OPENJDK_BUILD_OS-$OPENJDK_BUILD_CPU])
if test "x$OPENJDK_BUILD_OS" = "xlinux"; then
AC_MSG_CHECKING([openjdk-build C library])
AC_MSG_RESULT([$OPENJDK_BUILD_LIBC])
fi
# Convert the autoconf OS/CPU value to our own data, into the VAR_OS/CPU/LIBC variables.
# Convert the autoconf OS/CPU value to our own data, into the VAR_OS/CPU variables.
PLATFORM_EXTRACT_VARS_FROM_OS($host_os)
PLATFORM_EXTRACT_VARS_FROM_CPU($host_cpu)
PLATFORM_EXTRACT_VARS_FROM_LIBC($host_os)
PLATFORM_EXTRACT_VARS_FROM_ABI($host_os)
# ... and setup our own variables. (Do this explicitly to facilitate searching)
OPENJDK_TARGET_OS="$VAR_OS"
if test "x$VAR_OS_TYPE" != x; then
@@ -346,10 +286,7 @@ AC_DEFUN([PLATFORM_EXTRACT_TARGET_AND_BUILD],
OPENJDK_TARGET_CPU_ARCH="$VAR_CPU_ARCH"
OPENJDK_TARGET_CPU_BITS="$VAR_CPU_BITS"
OPENJDK_TARGET_CPU_ENDIAN="$VAR_CPU_ENDIAN"
OPENJDK_TARGET_CPU_AUTOCONF="$host_cpu"
OPENJDK_TARGET_OS_UPPERCASE=`$ECHO $OPENJDK_TARGET_OS | $TR 'abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'`
OPENJDK_TARGET_LIBC="$VAR_LIBC"
OPENJDK_TARGET_ABI="$VAR_ABI"
AC_SUBST(OPENJDK_TARGET_OS)
AC_SUBST(OPENJDK_TARGET_OS_TYPE)
@@ -359,17 +296,9 @@ AC_DEFUN([PLATFORM_EXTRACT_TARGET_AND_BUILD],
AC_SUBST(OPENJDK_TARGET_CPU_ARCH)
AC_SUBST(OPENJDK_TARGET_CPU_BITS)
AC_SUBST(OPENJDK_TARGET_CPU_ENDIAN)
AC_SUBST(OPENJDK_TARGET_CPU_AUTOCONF)
AC_SUBST(OPENJDK_TARGET_LIBC)
AC_SUBST(OPENJDK_TARGET_ABI)
AC_MSG_CHECKING([openjdk-target os-cpu])
AC_MSG_RESULT([$OPENJDK_TARGET_OS-$OPENJDK_TARGET_CPU])
if test "x$OPENJDK_TARGET_OS" = "xlinux"; then
AC_MSG_CHECKING([openjdk-target C library])
AC_MSG_RESULT([$OPENJDK_TARGET_LIBC])
fi
])
# Check if a reduced build (32-bit on 64-bit platforms) is requested, and modify behaviour
@@ -480,11 +409,9 @@ AC_DEFUN([PLATFORM_SETUP_LEGACY_VARS_HELPER],
fi
# The new version string in JDK 9 also defined new naming of OS and ARCH for bundles
# The macOS bundle name was revised in JDK 17
#
# macosx is macos and x86_64 is x64
# Macosx is osx and x86_64 is x64
if test "x$OPENJDK_$1_OS" = xmacosx; then
OPENJDK_$1_OS_BUNDLE="macos"
OPENJDK_$1_OS_BUNDLE="osx"
else
OPENJDK_$1_OS_BUNDLE="$OPENJDK_TARGET_OS"
fi
@@ -493,13 +420,7 @@ AC_DEFUN([PLATFORM_SETUP_LEGACY_VARS_HELPER],
else
OPENJDK_$1_CPU_BUNDLE="$OPENJDK_$1_CPU"
fi
OPENJDK_$1_LIBC_BUNDLE=""
if test "x$OPENJDK_$1_LIBC" = "xmusl"; then
OPENJDK_$1_LIBC_BUNDLE="-$OPENJDK_$1_LIBC"
fi
OPENJDK_$1_BUNDLE_PLATFORM="${OPENJDK_$1_OS_BUNDLE}-${OPENJDK_$1_CPU_BUNDLE}${OPENJDK_$1_LIBC_BUNDLE}"
OPENJDK_$1_BUNDLE_PLATFORM="${OPENJDK_$1_OS_BUNDLE}-${OPENJDK_$1_CPU_BUNDLE}"
AC_SUBST(OPENJDK_$1_BUNDLE_PLATFORM)
if test "x$COMPILE_TYPE" = "xcross"; then
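A worked example of the bundle platform strings produced on either side of this change; the musl suffix exists only on the side that carries the libc detection.

  #   macOS, x86_64:          with libc support: macos-x64        without: osx-x64
  #   Linux, x86_64 (glibc):  with libc support: linux-x64        without: linux-x64
  #   Linux, x86_64 (musl):   with libc support: linux-x64-musl   without: no musl suffix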
@@ -572,9 +493,6 @@ AC_DEFUN([PLATFORM_SETUP_LEGACY_VARS_HELPER],
fi
AC_SUBST(HOTSPOT_$1_CPU_DEFINE)
HOTSPOT_$1_LIBC=$OPENJDK_$1_LIBC
AC_SUBST(HOTSPOT_$1_LIBC)
# For historical reasons, the OS include directories have odd names.
OPENJDK_$1_OS_INCLUDE_SUBDIR="$OPENJDK_TARGET_OS"
if test "x$OPENJDK_TARGET_OS" = "xwindows"; then
@@ -600,11 +518,9 @@ AC_DEFUN([PLATFORM_SET_RELEASE_FILE_OS_VALUES],
RELEASE_FILE_OS_NAME="AIX"
fi
RELEASE_FILE_OS_ARCH=${OPENJDK_TARGET_CPU}
RELEASE_FILE_LIBC=${OPENJDK_TARGET_LIBC}
AC_SUBST(RELEASE_FILE_OS_NAME)
AC_SUBST(RELEASE_FILE_OS_ARCH)
AC_SUBST(RELEASE_FILE_LIBC)
])
AC_DEFUN([PLATFORM_SET_MODULE_TARGET_OS_VALUES],

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2017, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -28,6 +28,9 @@ AC_DEFUN_ONCE([SRCDIRS_SETUP_DIRS],
OUTPUTDIR="$OUTPUTDIR"
AC_SUBST(OUTPUTDIR)
JDK_OUTPUTDIR="$OUTPUTDIR/jdk"
# Where are the sources.
AC_SUBST(TOPDIR)
])
################################################################################

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -80,8 +80,6 @@ OPENJDK_TARGET_CPU_ARCH:=@OPENJDK_TARGET_CPU_ARCH@
OPENJDK_TARGET_CPU_BITS:=@OPENJDK_TARGET_CPU_BITS@
OPENJDK_TARGET_CPU_ENDIAN:=@OPENJDK_TARGET_CPU_ENDIAN@
OPENJDK_TARGET_LIBC:=@OPENJDK_TARGET_LIBC@
COMPILE_TYPE:=@COMPILE_TYPE@
# Legacy support
@@ -97,8 +95,6 @@ HOTSPOT_TARGET_CPU := @HOTSPOT_TARGET_CPU@
HOTSPOT_TARGET_CPU_ARCH := @HOTSPOT_TARGET_CPU_ARCH@
HOTSPOT_TARGET_CPU_DEFINE := @HOTSPOT_TARGET_CPU_DEFINE@
HOTSPOT_TARGET_LIBC := @HOTSPOT_TARGET_LIBC@
OPENJDK_TARGET_BUNDLE_PLATFORM:=@OPENJDK_TARGET_BUNDLE_PLATFORM@
JDK_ARCH_ABI_PROP_NAME := @JDK_ARCH_ABI_PROP_NAME@
@@ -113,8 +109,6 @@ OPENJDK_BUILD_CPU_ARCH:=@OPENJDK_BUILD_CPU_ARCH@
OPENJDK_BUILD_CPU_BITS:=@OPENJDK_BUILD_CPU_BITS@
OPENJDK_BUILD_CPU_ENDIAN:=@OPENJDK_BUILD_CPU_ENDIAN@
OPENJDK_BUILD_LIBC:=@OPENJDK_BUILD_LIBC@
OPENJDK_BUILD_OS_INCLUDE_SUBDIR:=@OPENJDK_TARGET_OS_INCLUDE_SUBDIR@
# Target platform value in ModuleTarget class file attribute.
@@ -123,7 +117,6 @@ OPENJDK_MODULE_TARGET_PLATFORM:=@OPENJDK_MODULE_TARGET_PLATFORM@
# OS_* properties in release file
RELEASE_FILE_OS_NAME:=@RELEASE_FILE_OS_NAME@
RELEASE_FILE_OS_ARCH:=@RELEASE_FILE_OS_ARCH@
RELEASE_FILE_LIBC:=@RELEASE_FILE_LIBC@
SOURCE_DATE := @SOURCE_DATE@
ENABLE_REPRODUCIBLE_BUILD := @ENABLE_REPRODUCIBLE_BUILD@
@@ -131,24 +124,20 @@ ENABLE_REPRODUCIBLE_BUILD := @ENABLE_REPRODUCIBLE_BUILD@
LIBM:=@LIBM@
LIBDL:=@LIBDL@
WINENV_ROOT := @WINENV_ROOT@
WINENV_PREFIX := @WINENV_PREFIX@
ifneq ($(findstring windows.wsl, @OPENJDK_BUILD_OS_ENV@), )
# Tell WSL to convert PATH between linux and windows
export WSLENV := PATH/l
else ifeq (@OPENJDK_BUILD_OS_ENV@, windows.msys2)
# Prohibit msys2 from attempting any path wrangling
export MSYS2_ARG_CONV_EXCL := "*"
endif
# colon or semicolon
PATH_SEP:=@PATH_SEP@
# Save the original path before replacing it with the Visual Studio tools
ORIGINAL_PATH := @ORIGINAL_PATH@
ifeq (@TOOLCHAIN_TYPE@, microsoft)
# The Visual Studio toolchain needs the PATH to be adjusted to include
# Visual Studio tools.
export PATH := @TOOLCHAIN_PATH@:$(PATH)
ORIGINAL_PATH:=@ORIGINAL_PATH@
ifeq ($(OPENJDK_TARGET_OS), windows)
# On Windows, the Visual Studio toolchain needs the PATH to be adjusted
# to include Visual Studio tools (this needs to be in cygwin/msys style).
ifeq ($(OPENJDK_TARGET_OS_ENV), windows.wsl)
export FIXPATH_PATH:=@VS_PATH_WINDOWS@
export WSLENV:=$(WSLENV):FIXPATH_PATH:DEBUG_FIXPATH
else
export PATH:=@VS_PATH@
endif
endif
SYSROOT_CFLAGS := @SYSROOT_CFLAGS@
@@ -181,7 +170,6 @@ COMPANY_NAME:=@COMPANY_NAME@
HOTSPOT_VM_DISTRO:=@HOTSPOT_VM_DISTRO@
MACOSX_BUNDLE_NAME_BASE=@MACOSX_BUNDLE_NAME_BASE@
MACOSX_BUNDLE_ID_BASE=@MACOSX_BUNDLE_ID_BASE@
MACOSX_BUNDLE_BUILD_VERSION=@MACOSX_BUNDLE_BUILD_VERSION@
USERNAME:=@USERNAME@
VENDOR_URL:=@VENDOR_URL@
VENDOR_URL_BUG:=@VENDOR_URL_BUG@
@@ -230,9 +218,6 @@ VENDOR_VERSION_STRING := @VENDOR_VERSION_STRING@
VERSION_CLASSFILE_MAJOR := @VERSION_CLASSFILE_MAJOR@
VERSION_CLASSFILE_MINOR := @VERSION_CLASSFILE_MINOR@
# Version for API docs "new-since" feature
VERSION_DOCS_API_SINCE := @VERSION_DOCS_API_SINCE@
JDK_SOURCE_TARGET_VERSION := @JDK_SOURCE_TARGET_VERSION@
# Convenience CFLAGS settings for passing version information into native programs.
@@ -258,7 +243,7 @@ VERSION_CFLAGS := \
#
ifneq ($(COMPANY_NAME),)
# COMPANY_NAME is set to "N/A" in make/conf/branding.conf by default,
# COMPANY_NAME is set to "N/A" in $AUTOCONF_DIR/version-numbers by default,
# but can be customized with the '--with-vendor-name' configure option.
# Only export "VENDOR" to the build if COMPANY_NAME contains a real value.
# Otherwise the default value for VENDOR, which is used to set the "java.vendor"
@@ -352,8 +337,6 @@ BUILD_MANPAGES := @BUILD_MANPAGES@
BUILD_CDS_ARCHIVE := @BUILD_CDS_ARCHIVE@
ENABLE_COMPATIBLE_CDS_ALIGNMENT := @ENABLE_COMPATIBLE_CDS_ALIGNMENT@
ALLOW_ABSOLUTE_PATHS_IN_OUTPUT := @ALLOW_ABSOLUTE_PATHS_IN_OUTPUT@
# The boot jdk to use. This is overridden in bootcycle-spec.gmk. Make sure to keep
@@ -395,6 +378,7 @@ LIBFFI_LIBS:=@LIBFFI_LIBS@
LIBFFI_CFLAGS:=@LIBFFI_CFLAGS@
ENABLE_LIBFFI_BUNDLING:=@ENABLE_LIBFFI_BUNDLING@
LIBFFI_LIB_FILE:=@LIBFFI_LIB_FILE@
GRAALUNIT_LIB := @GRAALUNIT_LIB@
FILE_MACRO_CFLAGS := @FILE_MACRO_CFLAGS@
STATIC_LIBS_CFLAGS := @STATIC_LIBS_CFLAGS@
@@ -479,7 +463,8 @@ CXX_O_FLAG_NORM:=@CXX_O_FLAG_NORM@
CXX_O_FLAG_NONE:=@CXX_O_FLAG_NONE@
CXX_O_FLAG_SIZE:=@CXX_O_FLAG_SIZE@
GENDEPS_FLAGS := @GENDEPS_FLAGS@
C_FLAG_DEPS:=@C_FLAG_DEPS@
CXX_FLAG_DEPS:=@CXX_FLAG_DEPS@
DISABLE_WARNING_PREFIX := @DISABLE_WARNING_PREFIX@
CFLAGS_WARNINGS_ARE_ERRORS:=@CFLAGS_WARNINGS_ARE_ERRORS@
@@ -491,11 +476,10 @@ DISABLED_WARNINGS_CXX := @DISABLED_WARNINGS_CXX@
WARNINGS_AS_ERRORS := @WARNINGS_AS_ERRORS@
CFLAGS_CCACHE:=@CFLAGS_CCACHE@
ADLC_LANGSTD_CXXFLAGS=@ADLC_LANGSTD_CXXFLAGS@
ADLC_LDFLAGS=@ADLC_LDFLAGS@
ADLC_CXXFLAG=@ADLC_CXXFLAG@
# Tools that potentially need to be cross compilation aware.
CC := @CCACHE@ @ICECC@ @CC@
CC:=@FIXPATH@ @CCACHE@ @ICECC@ @CC@
# CFLAGS used to compile the jdk native libraries (C-code)
CFLAGS_JDKLIB:=@CFLAGS_JDKLIB@
@@ -511,20 +495,21 @@ JVM_LDFLAGS := @JVM_LDFLAGS@
JVM_ASFLAGS := @JVM_ASFLAGS@
JVM_LIBS := @JVM_LIBS@
BASIC_ASFLAGS := @BASIC_ASFLAGS@
# These flags might contain variables set by a custom extension that is included later.
EXTRA_CFLAGS = @EXTRA_CFLAGS@
EXTRA_CXXFLAGS = @EXTRA_CXXFLAGS@
EXTRA_LDFLAGS = @EXTRA_LDFLAGS@
EXTRA_ASFLAGS = @EXTRA_ASFLAGS@
CXX := @CCACHE@ @ICECC@ @CXX@
CXX:=@FIXPATH@ @CCACHE@ @ICECC@ @CXX@
CPP := @CPP@
CPP:=@FIXPATH@ @CPP@
# The linker can be gcc or ld on unix systems, or link.exe on windows systems.
LD := @LD@
LD:=@FIXPATH@ @LD@
# Linker used by the jaotc tool for AOT compilation.
LD_JAOTC:=@LD_JAOTC@
# Xcode SDK path
SDKROOT:=@SDKROOT@
@@ -541,7 +526,7 @@ JDKEXE_LIBS:=@JDKEXE_LIBS@
LDFLAGS_CXX_JDK:=@LDFLAGS_CXX_JDK@
# Sometimes a different linker is needed for c++ libs
LDCXX := @LDCXX@
LDCXX:=@FIXPATH@ @LDCXX@
# The flags for linking libstdc++ linker.
LIBCXX:=@LIBCXX@
@@ -550,22 +535,22 @@ LDFLAGS_TESTEXE:=@LDFLAGS_TESTEXE@
# BUILD_CC/BUILD_LD is a compiler/linker that generates code that is runnable on the
# build platform.
BUILD_CC := @BUILD_ICECC@ @BUILD_CC@
BUILD_CXX := @BUILD_ICECC@ @BUILD_CXX@
BUILD_LD := @BUILD_LD@
BUILD_LDCXX := @BUILD_LDCXX@
BUILD_AS := @BUILD_AS@
BUILD_AR := @BUILD_AR@
BUILD_NM := @BUILD_NM@
BUILD_CC:=@FIXPATH@ @BUILD_ICECC@ @BUILD_CC@
BUILD_CXX:=@FIXPATH@ @BUILD_ICECC@ @BUILD_CXX@
BUILD_LD:=@FIXPATH@ @BUILD_LD@
BUILD_LDCXX:=@FIXPATH@ @BUILD_LDCXX@
BUILD_AS:=@FIXPATH@ @BUILD_AS@
BUILD_AR:=@FIXPATH@ @BUILD_AR@
BUILD_NM:=@FIXPATH@ @BUILD_NM@
BUILD_OBJCOPY:=@BUILD_OBJCOPY@
BUILD_STRIP:=@BUILD_STRIP@
BUILD_SYSROOT_CFLAGS:=@BUILD_SYSROOT_CFLAGS@
BUILD_SYSROOT_LDFLAGS:=@BUILD_SYSROOT_LDFLAGS@
AS := @AS@
AS:=@FIXPATH@ @AS@
# AR is used to create a static library (is ar in unix, lib.exe in windows)
AR := @AR@
AR:=@FIXPATH@ @AR@
ARFLAGS:=@ARFLAGS@
NM:=@NM@
@@ -576,8 +561,6 @@ CXXFILT:=@CXXFILT@
LIPO:=@LIPO@
INSTALL_NAME_TOOL:=@INSTALL_NAME_TOOL@
METAL := @METAL@
METALLIB := @METALLIB@
# Options to linker to specify a mapfile.
# (Note absence of := assignment, because we do not want to evaluate the macro body here)
@@ -616,7 +599,7 @@ STATIC_LIBRARY=@STATIC_LIBRARY@
LIBRARY_PREFIX:=@LIBRARY_PREFIX@
SHARED_LIBRARY_SUFFIX:=@SHARED_LIBRARY_SUFFIX@
STATIC_LIBRARY_SUFFIX:=@STATIC_LIBRARY_SUFFIX@
EXECUTABLE_SUFFIX:=@EXECUTABLE_SUFFIX@
EXE_SUFFIX:=@EXE_SUFFIX@
OBJ_SUFFIX:=@OBJ_SUFFIX@
STATIC_BUILD:=@STATIC_BUILD@
@@ -637,23 +620,23 @@ JAVADOC_CMD:=@JAVADOC@
JAR_CMD:=@JAR@
JLINK_CMD := @JLINK@
JMOD_CMD := @JMOD@
JARSIGNER_CMD:=@JARSIGNER@
# These variables are meant to be used. They are defined with = instead of := to make
# it possible to override only the *_CMD variables.
JAVA = $(JAVA_CMD) $(JAVA_FLAGS_BIG) $(JAVA_FLAGS)
JAVA_SMALL = $(JAVA_CMD) $(JAVA_FLAGS_SMALL) $(JAVA_FLAGS)
JAVAC = $(JAVAC_CMD)
JAVADOC = $(JAVADOC_CMD)
JAR = $(JAR_CMD)
JLINK = $(JLINK_CMD)
JMOD = $(JMOD_CMD) $(JAVA_TOOL_FLAGS_SMALL)
JAVA=@FIXPATH@ $(JAVA_CMD) $(JAVA_FLAGS_BIG) $(JAVA_FLAGS)
JAVA_SMALL=@FIXPATH@ $(JAVA_CMD) $(JAVA_FLAGS_SMALL) $(JAVA_FLAGS)
JAVA_DETACH =@FIXPATH@ @FIXPATH_DETACH_FLAG@ $(JAVA_CMD) $(JAVA_FLAGS_BIG) $(JAVA_FLAGS)
JAVAC=@FIXPATH@ $(JAVAC_CMD)
JAVADOC=@FIXPATH@ $(JAVADOC_CMD)
JAR=@FIXPATH@ $(JAR_CMD)
JLINK = @FIXPATH@ $(JLINK_CMD)
JMOD = @FIXPATH@ $(JMOD_CMD) $(JAVA_TOOL_FLAGS_SMALL)
JARSIGNER=@FIXPATH@ $(JARSIGNER_CMD)
BUILD_JAVA_FLAGS := @BOOTCYCLE_JVM_ARGS_BIG@
BUILD_JAVA=@FIXPATH@ $(BUILD_JDK)/bin/java $(BUILD_JAVA_FLAGS)
BUILD_JAVAC=@FIXPATH@ $(BUILD_JDK)/bin/javac
BUILD_JAR=@FIXPATH@ $(BUILD_JDK)/bin/jar
DOCS_REFERENCE_JAVADOC := @DOCS_REFERENCE_JAVADOC@
# Interim langtools modules and arguments
INTERIM_LANGTOOLS_BASE_MODULES := java.compiler jdk.compiler jdk.javadoc
INTERIM_LANGTOOLS_MODULES := $(addsuffix .interim, $(INTERIM_LANGTOOLS_BASE_MODULES))
@@ -662,8 +645,6 @@ INTERIM_LANGTOOLS_ADD_EXPORTS := \
--add-exports java.base/jdk.internal.jmod=jdk.compiler.interim \
--add-exports java.base/jdk.internal.misc=jdk.compiler.interim \
--add-exports java.base/sun.invoke.util=jdk.compiler.interim \
--add-exports java.base/jdk.internal.javac=java.compiler.interim \
--add-exports java.base/jdk.internal.javac=jdk.compiler.interim \
#
INTERIM_LANGTOOLS_MODULES_COMMA := $(strip $(subst $(SPACE),$(COMMA),$(strip \
$(INTERIM_LANGTOOLS_MODULES))))
@@ -671,7 +652,6 @@ INTERIM_LANGTOOLS_ARGS := \
--limit-modules java.base,jdk.zipfs,$(INTERIM_LANGTOOLS_MODULES_COMMA) \
--add-modules $(INTERIM_LANGTOOLS_MODULES_COMMA) \
--module-path $(BUILDTOOLS_OUTPUTDIR)/interim_langtools_modules \
--patch-module java.base=$(BUILDTOOLS_OUTPUTDIR)/gensrc/java.base.interim \
$(INTERIM_LANGTOOLS_ADD_EXPORTS) \
#
JAVAC_MAIN_CLASS = -m jdk.compiler.interim/com.sun.tools.javac.Main
@@ -695,7 +675,9 @@ CCACHE:=@CCACHE@
CD:=cd
CHMOD:=@CHMOD@
CODESIGN:=@CODESIGN@
COMM:=@COMM@
CP:=@CP@
CPIO:=@CPIO@
CUT:=@CUT@
DATE:=@DATE@
DIFF:=@DIFF@
@@ -715,8 +697,9 @@ LN:=@LN@
MIG:=@MIG@
MKDIR:=@MKDIR@
MV:=@MV@
NAWK:=@NAWK@
NICE:=@NICE@
PANDOC:=@PANDOC@
PANDOC:=@FIXPATH@ @PANDOC@
PATCH:=@PATCH@
PRINTF:=@PRINTF@
READLINK:=@READLINK@
@@ -732,14 +715,15 @@ TIME:=@TIME@
IS_GNU_TIME:=@IS_GNU_TIME@
TR:=@TR@
TOUCH:=@TOUCH@
UNIQ:=@UNIQ@
WC:=@WC@
XARGS:=@XARGS@
ZIPEXE:=@ZIPEXE@
UNZIP:=@UNZIP@
MT:=@MT@
RC:=@RC@
DUMPBIN:=@DUMPBIN@
PATHTOOL:=@PATHTOOL@
MT:=@FIXPATH@ @MT@
RC:=@FIXPATH@ @RC@
DUMPBIN:=@FIXPATH@ @DUMPBIN@
CYGPATH:=@CYGPATH@
WSLPATH:=@WSLPATH@
LDD:=@LDD@
OTOOL:=@OTOOL@
@@ -756,8 +740,7 @@ JT_HOME:=@JT_HOME@
JIB_HOME:=@JIB_HOME@
XCODEBUILD=@XCODEBUILD@
DTRACE := @DTRACE@
FIXPATH := @FIXPATH@
FIXPATH_BASE := @FIXPATH_BASE@
FIXPATH:=@FIXPATH@
ULIMIT:=@ULIMIT@
TAR_TYPE:=@TAR_TYPE@
@@ -765,6 +748,8 @@ TAR_INCLUDE_PARAM:=@TAR_INCLUDE_PARAM@
TAR_SUPPORTS_TRANSFORM:=@TAR_SUPPORTS_TRANSFORM@
# Build setup
ENABLE_AOT:=@ENABLE_AOT@
ENABLE_INTREE_EC:=@ENABLE_INTREE_EC@
USE_EXTERNAL_LIBJPEG:=@USE_EXTERNAL_LIBJPEG@
USE_EXTERNAL_LIBGIF:=@USE_EXTERNAL_LIBGIF@
USE_EXTERNAL_LIBZ:=@USE_EXTERNAL_LIBZ@
@@ -838,10 +823,6 @@ USE_EXTERNAL_LCMS:=@USE_EXTERNAL_LCMS@
LCMS_CFLAGS:=@LCMS_CFLAGS@
LCMS_LIBS:=@LCMS_LIBS@
USE_EXTERNAL_HARFBUZZ:=@USE_EXTERNAL_HARFBUZZ@
HARFBUZZ_CFLAGS:=@HARFBUZZ_CFLAGS@
HARFBUZZ_LIBS:=@HARFBUZZ_LIBS@
USE_EXTERNAL_LIBPNG:=@USE_EXTERNAL_LIBPNG@
PNG_LIBS:=@PNG_LIBS@
PNG_CFLAGS:=@PNG_CFLAGS@
@@ -852,8 +833,8 @@ PNG_CFLAGS:=@PNG_CFLAGS@
#
INCLUDE_SA=@INCLUDE_SA@
INCLUDE_GRAAL=@INCLUDE_GRAAL@
INCLUDE_JVMCI=@INCLUDE_JVMCI@
INCLUDE_COMPILER2=@INCLUDE_COMPILER2@
OS_VERSION_MAJOR:=@OS_VERSION_MAJOR@
OS_VERSION_MINOR:=@OS_VERSION_MINOR@
@@ -882,14 +863,10 @@ INTERIM_JMODS_DIR := $(SUPPORT_OUTPUTDIR)/interim-jmods
INTERIM_IMAGE_DIR := $(SUPPORT_OUTPUTDIR)/interim-image
# Docs image
DOCS_JDK_IMAGE_SUBDIR := docs
DOCS_JDK_IMAGE_DIR = $(IMAGES_OUTPUTDIR)/$(DOCS_JDK_IMAGE_SUBDIR)
DOCS_JAVASE_IMAGE_SUBDIR := docs-javase
DOCS_JAVASE_IMAGE_DIR = $(IMAGES_OUTPUTDIR)/$(DOCS_JAVASE_IMAGE_SUBDIR)
DOCS_REFERENCE_IMAGE_SUBDIR := docs-reference
DOCS_REFERENCE_IMAGE_DIR = $(IMAGES_OUTPUTDIR)/$(DOCS_REFERENCE_IMAGE_SUBDIR)
DOCS_IMAGE_SUBDIR := docs
DOCS_IMAGE_DIR = $(IMAGES_OUTPUTDIR)/$(DOCS_IMAGE_SUBDIR)
# Output docs directly into image
DOCS_OUTPUTDIR := $(DOCS_JDK_IMAGE_DIR)
DOCS_OUTPUTDIR := $(DOCS_IMAGE_DIR)
# Static libs image
STATIC_LIBS_IMAGE_SUBDIR := static-libs
@@ -932,9 +909,7 @@ JRE_BUNDLE_NAME := jre-$(BASE_NAME)_bin$(DEBUG_PART).$(JDK_BUNDLE_EXTENSION)
JDK_SYMBOLS_BUNDLE_NAME := jdk-$(BASE_NAME)_bin$(DEBUG_PART)-symbols.tar.gz
TEST_DEMOS_BUNDLE_NAME := jdk-$(BASE_NAME)_bin-tests-demos$(DEBUG_PART).tar.gz
TEST_BUNDLE_NAME := jdk-$(BASE_NAME)_bin-tests$(DEBUG_PART).tar.gz
DOCS_JDK_BUNDLE_NAME := jdk-$(BASE_NAME)_doc-api-spec$(DEBUG_PART).tar.gz
DOCS_JAVASE_BUNDLE_NAME := javase-$(BASE_NAME)_doc-api-spec$(DEBUG_PART).tar.gz
DOCS_REFERENCE_BUNDLE_NAME := jdk-reference-$(BASE_NAME)_doc-api-spec$(DEBUG_PART).tar.gz
DOCS_BUNDLE_NAME := jdk-$(BASE_NAME)_doc-api-spec$(DEBUG_PART).tar.gz
STATIC_LIBS_BUNDLE_NAME := jdk-$(BASE_NAME)_bin-static-libs$(DEBUG_PART).tar.gz
JCOV_BUNDLE_NAME := jdk-jcov-$(BASE_NAME)_bin$(DEBUG_PART).$(JDK_BUNDLE_EXTENSION)
@@ -943,9 +918,7 @@ JRE_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(JRE_BUNDLE_NAME)
JDK_SYMBOLS_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(JDK_SYMBOLS_BUNDLE_NAME)
TEST_DEMOS_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(TEST_DEMOS_BUNDLE_NAME)
TEST_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(TEST_BUNDLE_NAME)
DOCS_JDK_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(DOCS_JDK_BUNDLE_NAME)
DOCS_JAVASE_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(DOCS_JAVASE_BUNDLE_NAME)
DOCS_REFERENCE_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(DOCS_REFERENCE_BUNDLE_NAME)
DOCS_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(DOCS_BUNDLE_NAME)
JCOV_BUNDLE := $(BUNDLES_OUTPUTDIR)/$(JCOV_BUNDLE_NAME)
# This macro is called to allow inclusion of closed source counterparts.

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -32,7 +32,7 @@
# compilers and related tools that are used.
########################################################################
m4_include([toolchain_microsoft.m4])
m4_include([toolchain_windows.m4])
# All valid toolchains, regardless of platform (used by help.m4)
VALID_TOOLCHAINS_all="gcc clang xlc microsoft"
@@ -50,9 +50,9 @@ TOOLCHAIN_DESCRIPTION_microsoft="Microsoft Visual Studio"
TOOLCHAIN_DESCRIPTION_xlc="IBM XL C/C++"
# Minimum supported versions, empty means unspecified
TOOLCHAIN_MINIMUM_VERSION_clang="3.5"
TOOLCHAIN_MINIMUM_VERSION_clang="3.2"
TOOLCHAIN_MINIMUM_VERSION_gcc="5.0"
TOOLCHAIN_MINIMUM_VERSION_microsoft="19.10.0.0" # VS2017
TOOLCHAIN_MINIMUM_VERSION_microsoft="16.00.30319.01" # VS2010
TOOLCHAIN_MINIMUM_VERSION_xlc=""
# Minimum supported linker versions, empty means unspecified
@@ -179,7 +179,6 @@ AC_DEFUN([TOOLCHAIN_SETUP_FILENAME_PATTERNS],
SHARED_LIBRARY='[$]1.dll'
STATIC_LIBRARY='[$]1.lib'
OBJ_SUFFIX='.obj'
EXECUTABLE_SUFFIX='.exe'
else
LIBRARY_PREFIX=lib
SHARED_LIBRARY_SUFFIX='.so'
@@ -187,7 +186,6 @@ AC_DEFUN([TOOLCHAIN_SETUP_FILENAME_PATTERNS],
SHARED_LIBRARY='lib[$]1.so'
STATIC_LIBRARY='lib[$]1.a'
OBJ_SUFFIX='.o'
EXECUTABLE_SUFFIX=''
if test "x$OPENJDK_TARGET_OS" = xmacosx; then
# For full static builds, we're overloading the SHARED_LIBRARY
# variables in order to limit the amount of changes required.
@@ -211,7 +209,6 @@ AC_DEFUN([TOOLCHAIN_SETUP_FILENAME_PATTERNS],
AC_SUBST(SHARED_LIBRARY)
AC_SUBST(STATIC_LIBRARY)
AC_SUBST(OBJ_SUFFIX)
AC_SUBST(EXECUTABLE_SUFFIX)
])
# Determine which toolchain type to use, and make sure it is valid for this
@@ -221,12 +218,6 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETERMINE_TOOLCHAIN_TYPE],
AC_ARG_WITH(toolchain-type, [AS_HELP_STRING([--with-toolchain-type],
[the toolchain type (or family) to use, use '--help' to show possible values @<:@platform dependent@:>@])])
# Linux x86_64 needs higher binutils after 8265783
# (this really is a dependency on as version, but we take ld as a check for a general binutils version)
if test "x$OPENJDK_TARGET_CPU" = "xx86_64"; then
TOOLCHAIN_MINIMUM_LD_VERSION_gcc="2.25"
fi
# Use indirect variable referencing
toolchain_var_name=VALID_TOOLCHAINS_$OPENJDK_BUILD_OS
VALID_TOOLCHAINS=${!toolchain_var_name}
@@ -301,12 +292,12 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETERMINE_TOOLCHAIN_TYPE],
TOOLCHAIN_CC_BINARY_clang="clang"
TOOLCHAIN_CC_BINARY_gcc="gcc"
TOOLCHAIN_CC_BINARY_microsoft="cl"
TOOLCHAIN_CC_BINARY_microsoft="cl$EXE_SUFFIX"
TOOLCHAIN_CC_BINARY_xlc="xlclang"
TOOLCHAIN_CXX_BINARY_clang="clang++"
TOOLCHAIN_CXX_BINARY_gcc="g++"
TOOLCHAIN_CXX_BINARY_microsoft="cl"
TOOLCHAIN_CXX_BINARY_microsoft="cl$EXE_SUFFIX"
TOOLCHAIN_CXX_BINARY_xlc="xlclang++"
# Use indirect variable referencing
@@ -334,6 +325,9 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETERMINE_TOOLCHAIN_TYPE],
# special setup, e.g. additional paths etc.
AC_DEFUN_ONCE([TOOLCHAIN_PRE_DETECTION],
[
# FIXME: Is this needed?
AC_LANG(C++)
# Store the CFLAGS etc passed to the configure script.
ORG_CFLAGS="$CFLAGS"
ORG_CXXFLAGS="$CXXFLAGS"
@@ -341,26 +335,52 @@ AC_DEFUN_ONCE([TOOLCHAIN_PRE_DETECTION],
# autoconf magic only relies on PATH, so update it if tools dir is specified
OLD_PATH="$PATH"
if test "x$XCODE_VERSION_OUTPUT" != x; then
# For Xcode, we set the Xcode version as TOOLCHAIN_VERSION
TOOLCHAIN_VERSION=`$ECHO $XCODE_VERSION_OUTPUT | $CUT -f 2 -d ' '`
TOOLCHAIN_DESCRIPTION="$TOOLCHAIN_DESCRIPTION from Xcode $TOOLCHAIN_VERSION"
# On Windows, we need to detect the visual studio installation first.
# This will change the PATH, but we need to keep that new PATH even
# after toolchain detection is done, since the compiler (on x86) uses
# it for DLL resolution in runtime.
if test "x$OPENJDK_BUILD_OS" = "xwindows" \
&& test "x$TOOLCHAIN_TYPE" = "xmicrosoft"; then
TOOLCHAIN_SETUP_VISUAL_STUDIO_ENV
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
# Append VS_PATH. In WSL, VS_PATH will not contain the WSL env path needed
# for using basic Unix tools, so need to keep the original PATH.
UTIL_APPEND_TO_PATH(PATH, $VS_PATH)
UTIL_APPEND_TO_PATH(WSLENV, "PATH/l:LIB:INCLUDE")
export WSLENV
else
# Reset path to VS_PATH. It will include everything that was on PATH at the time we
# ran TOOLCHAIN_SETUP_VISUAL_STUDIO_ENV.
PATH="$VS_PATH"
fi
# The microsoft toolchain also requires INCLUDE and LIB to be set.
export INCLUDE="$VS_INCLUDE"
export LIB="$VS_LIB"
else
if test "x$XCODE_VERSION_OUTPUT" != x; then
# For Xcode, we set the Xcode version as TOOLCHAIN_VERSION
TOOLCHAIN_VERSION=`$ECHO $XCODE_VERSION_OUTPUT | $CUT -f 2 -d ' '`
TOOLCHAIN_DESCRIPTION="$TOOLCHAIN_DESCRIPTION from Xcode $TOOLCHAIN_VERSION"
else
# Currently we do not define this for other toolchains. This might change as the need arises.
TOOLCHAIN_VERSION=
fi
fi
AC_SUBST(TOOLCHAIN_VERSION)
# Finally prepend TOOLCHAIN_PATH to the PATH, to allow --with-tools-dir to
# Finally add TOOLCHAIN_PATH at the beginning, to allow --with-tools-dir to
# override all other locations.
if test "x$TOOLCHAIN_PATH" != x; then
export PATH=$TOOLCHAIN_PATH:$PATH
PATH=$TOOLCHAIN_PATH:$PATH
fi
])
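After the Visual Studio environment setup above has run, the variables it is expected to leave behind can be inspected from the same shell; the directory names mentioned in the comments are only illustrative.

  echo "PATH=$PATH"        # now also contains the MSVC and Windows SDK tool directories
  echo "INCLUDE=$INCLUDE"  # taken from $VS_INCLUDE (MSVC and Windows SDK headers)
  echo "LIB=$LIB"          # taken from $VS_LIB (the matching .lib directories)
  echo "WSLENV=$WSLENV"    # on WSL also lists PATH/l:LIB:INCLUDE so they reach Windows processes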
# Restore path, etc
AC_DEFUN_ONCE([TOOLCHAIN_POST_DETECTION],
[
# Restore old path, except for the microsoft toolchain, which requires the
# toolchain path to remain in place. Otherwise the compiler will not work in
# some situations in later configure checks.
# Restore old path, except for the microsoft toolchain, which requires VS_PATH
# to remain in place. Otherwise the compiler will not work in some situations
# in later configure checks.
if test "x$TOOLCHAIN_TYPE" != "xmicrosoft"; then
PATH="$OLD_PATH"
fi
@@ -407,7 +427,7 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_COMPILER_VERSION],
# First line typically looks something like:
# Microsoft (R) 32-bit C/C++ Optimizing Compiler Version 16.00.40219.01 for 80x86
# but the compiler name may vary depending on locale.
COMPILER_VERSION_OUTPUT=`$COMPILER 2>&1 1>/dev/null | $HEAD -n 1 | $TR -d '\r'`
COMPILER_VERSION_OUTPUT=`"$COMPILER" 2>&1 | $GREP -v 'ERROR.*UtilTranslatePathList' | $HEAD -n 1 | $TR -d '\r'`
# Check that this is likely to be Microsoft CL.EXE.
$ECHO "$COMPILER_VERSION_OUTPUT" | $GREP "Microsoft" > /dev/null
if test $? -ne 0; then
@@ -486,7 +506,7 @@ AC_DEFUN([TOOLCHAIN_FIND_COMPILER],
if test "x`basename [$]$1`" = "x[$]$1"; then
# A command without a complete path is provided, search $PATH.
UTIL_LOOKUP_PROGS(POTENTIAL_$1, [$]$1)
AC_PATH_PROGS(POTENTIAL_$1, [$]$1)
if test "x$POTENTIAL_$1" != x; then
$1=$POTENTIAL_$1
else
@@ -508,16 +528,34 @@ AC_DEFUN([TOOLCHAIN_FIND_COMPILER],
# If we are not cross compiling, then the default compiler name will be
# used.
UTIL_LOOKUP_TOOLCHAIN_PROGS(POTENTIAL_$1, $SEARCH_LIST)
if test "x$POTENTIAL_$1" != x; then
$1=
# If TOOLCHAIN_PATH is set, check for all compiler names in there first
# before checking the rest of the PATH.
# FIXME: Now that we prefix the TOOLS_DIR to the PATH in the PRE_DETECTION
# step, this should not be necessary.
if test -n "$TOOLCHAIN_PATH"; then
PATH_save="$PATH"
PATH="$TOOLCHAIN_PATH"
AC_PATH_TOOL(TOOLCHAIN_PATH_$1, $SEARCH_LIST)
$1=$TOOLCHAIN_PATH_$1
PATH="$PATH_save"
fi
# AC_PATH_TOOL can't be run multiple times with the same variable,
# so create a new name for this run.
if test "x[$]$1" = x; then
AC_PATH_TOOL(POTENTIAL_$1, $SEARCH_LIST)
$1=$POTENTIAL_$1
else
fi
if test "x[$]$1" = x; then
HELP_MSG_MISSING_DEPENDENCY([devkit])
AC_MSG_ERROR([Could not find a $COMPILER_NAME compiler. $HELP_MSG])
fi
fi
# Now we have a compiler binary in $1. Make sure it's okay.
UTIL_FIXUP_EXECUTABLE($1)
TEST_COMPILER="[$]$1"
AC_MSG_CHECKING([resolved symbolic links for $1])
@@ -558,7 +596,12 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_LD_VERSION],
# There is no specific version flag, but all output starts with a version string.
# First line typically looks something like:
# Microsoft (R) Incremental Linker Version 12.00.31101.0
LINKER_VERSION_STRING=`$LINKER 2>&1 | $HEAD -n 1 | $TR -d '\r'`
# Reset PATH since it can contain a mix of WSL/linux paths and Windows paths from VS,
# which, in combination with WSLENV, will make the WSL layer complain
old_path="$PATH"
PATH=
LINKER_VERSION_STRING=`$LD 2>&1 | $HEAD -n 1 | $TR -d '\r'`
PATH="$old_path"
# Extract version number
[ LINKER_VERSION_NUMBER=`$ECHO $LINKER_VERSION_STRING | \
$SED -e 's/.* \([0-9][0-9]*\(\.[0-9][0-9]*\)*\).*/\1/'` ]
@@ -569,7 +612,7 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_LD_VERSION],
# This program is free software; [...]
# If using gold it will look like:
# GNU gold (GNU Binutils 2.30) 1.15
LINKER_VERSION_STRING=`$LINKER -Wl,--version 2> /dev/null | $HEAD -n 1`
LINKER_VERSION_STRING=`$LD -Wl,--version 2> /dev/null | $HEAD -n 1`
# Extract version number
if [ [[ "$LINKER_VERSION_STRING" == *gold* ]] ]; then
[ LINKER_VERSION_NUMBER=`$ECHO $LINKER_VERSION_STRING | \
@@ -586,7 +629,7 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_LD_VERSION],
# or
# GNU ld (GNU Binutils for Ubuntu) 2.26.1
LINKER_VERSION_STRING=`$LINKER -Wl,-v 2>&1 | $HEAD -n 1`
LINKER_VERSION_STRING=`$LD -Wl,-v 2>&1 | $HEAD -n 1`
# Check if we're using the GNU ld
$ECHO "$LINKER_VERSION_STRING" | $GREP "GNU" > /dev/null
if test $? -eq 0; then
@@ -606,23 +649,6 @@ AC_DEFUN([TOOLCHAIN_EXTRACT_LD_VERSION],
AC_MSG_NOTICE([Using $TOOLCHAIN_TYPE $LINKER_NAME version $LINKER_VERSION_NUMBER @<:@$LINKER_VERSION_STRING@:>@])
])
# Make sure we did not pick up /usr/bin/link, which is the unix-style link
# executable.
#
# $1 = linker to test (LD or BUILD_LD)
AC_DEFUN(TOOLCHAIN_VERIFY_LINK_BINARY,
[
LINKER=[$]$1
AC_MSG_CHECKING([if the found link.exe is actually the Visual Studio linker])
$LINKER --version > /dev/null
if test $? -eq 0 ; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([$LINKER is the winenv link tool. Please check your PATH and rerun configure.])
else
AC_MSG_RESULT([yes])
fi
])
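The disambiguation trick used by the removed helper above (and by the inlined check further down) in isolation: the coreutils/Cygwin 'link' accepts --version and exits 0, while the Visual Studio linker rejects it.

  "$LD" --version > /dev/null 2>&1 \
      && echo "unix-style link picked up, wrong tool" \
      || echo "looks like the Visual Studio link.exe"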
# Detect the core components of the toolchain, i.e. the compilers (CC and CXX),
# preprocessor (CPP and CXXCPP), the linker (LD), the assembler (AS) and the
# archiver (AR). Verify that the compilers are correct according to the
@@ -667,15 +693,42 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_CORE],
#
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# In the Microsoft toolchain we have a separate LD command "link".
UTIL_LOOKUP_TOOLCHAIN_PROGS(LD, link)
TOOLCHAIN_VERIFY_LINK_BINARY(LD)
# Make sure we reject /usr/bin/link (as determined in CYGWIN_LINK), which is
# a cygwin program for something completely different.
AC_CHECK_PROG([LD], [link$EXE_SUFFIX],[link$EXE_SUFFIX],,, [$CYGWIN_LINK])
UTIL_FIXUP_EXECUTABLE(LD)
# Verify that we indeed succeeded with this trick.
AC_MSG_CHECKING([if the found link.exe is actually the Visual Studio linker])
# Reset PATH since it can contain a mix of WSL/linux paths and Windows paths from VS,
# which, in combination with WSLENV, will make the WSL layer complain
old_path="$PATH"
PATH=
"$LD" --version > /dev/null
if test $? -eq 0 ; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([This is the Cygwin link tool. Please check your PATH and rerun configure.])
else
AC_MSG_RESULT([yes])
fi
PATH="$old_path"
LDCXX="$LD"
# jaotc being a windows program expects the linker to be supplied with exe suffix.
LD_JAOTC="$LD$EXE_SUFFIX"
else
# All other toolchains use the compiler to link.
LD="$CC"
LDCXX="$CXX"
# jaotc expects 'ld' as the linker rather than the compiler.
UTIL_CHECK_TOOLS([LD_JAOTC], ld)
UTIL_FIXUP_EXECUTABLE(LD_JAOTC)
fi
AC_SUBST(LD)
AC_SUBST(LD_JAOTC)
# FIXME: it should be CXXLD, according to standard (cf CXXCPP)
AC_SUBST(LDCXX)
@@ -683,10 +736,9 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_CORE],
TOOLCHAIN_PREPARE_FOR_LD_VERSION_COMPARISONS
if test "x$TOOLCHAIN_MINIMUM_LD_VERSION" != x; then
AC_MSG_NOTICE([comparing linker version to minimum version $TOOLCHAIN_MINIMUM_LD_VERSION])
TOOLCHAIN_CHECK_LINKER_VERSION(VERSION: $TOOLCHAIN_MINIMUM_LD_VERSION,
IF_OLDER_THAN: [
AC_MSG_ERROR([You are using a linker older than $TOOLCHAIN_MINIMUM_LD_VERSION. This is not a supported configuration.])
AC_MSG_WARN([You are using a linker older than $TOOLCHAIN_MINIMUM_LD_VERSION. This is not a supported configuration.])
]
)
fi
@@ -694,17 +746,8 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_CORE],
#
# Setup the assembler (AS)
#
if test "x$TOOLCHAIN_TYPE" != xmicrosoft; then
AS="$CC -c"
else
if test "x$OPENJDK_TARGET_CPU_BITS" = "x64"; then
# On 64 bit windows, the assembler is "ml64.exe"
UTIL_LOOKUP_TOOLCHAIN_PROGS(AS, ml64)
else
# otherwise, the assembler is "ml.exe"
UTIL_LOOKUP_TOOLCHAIN_PROGS(AS, ml)
fi
fi
# FIXME: is this correct for microsoft?
AS="$CC -c"
AC_SUBST(AS)
#
@@ -712,12 +755,13 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_CORE],
#
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# The corresponding ar tool is lib.exe (used to create static libraries)
UTIL_LOOKUP_TOOLCHAIN_PROGS(AR, lib)
AC_CHECK_PROG([AR], [lib$EXE_SUFFIX],[lib$EXE_SUFFIX],,,)
elif test "x$TOOLCHAIN_TYPE" = xgcc; then
UTIL_LOOKUP_TOOLCHAIN_PROGS(AR, ar gcc-ar)
UTIL_CHECK_TOOLS(AR, ar gcc-ar)
else
UTIL_LOOKUP_TOOLCHAIN_PROGS(AR, ar)
UTIL_CHECK_TOOLS(AR, ar)
fi
UTIL_FIXUP_EXECUTABLE(AR)
])
# Setup additional tools that is considered a part of the toolchain, but not the
@@ -726,52 +770,41 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_CORE],
AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_EXTRA],
[
if test "x$OPENJDK_TARGET_OS" = "xmacosx"; then
UTIL_LOOKUP_PROGS(LIPO, lipo)
UTIL_PATH_PROGS(LIPO, lipo)
UTIL_FIXUP_EXECUTABLE(LIPO)
UTIL_REQUIRE_PROGS(OTOOL, otool)
UTIL_FIXUP_EXECUTABLE(OTOOL)
UTIL_REQUIRE_PROGS(INSTALL_NAME_TOOL, install_name_tool)
UTIL_LOOKUP_TOOLCHAIN_PROGS(METAL, metal)
if test "x$METAL" = x; then
AC_MSG_CHECKING([if metal can be run using xcrun])
METAL="xcrun -sdk macosx metal"
test_metal=`$METAL --version 2>&1`
if test $? -ne 0; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([XCode tool 'metal' neither found in path nor with xcrun])
else
AC_MSG_RESULT([yes, will be using '$METAL'])
fi
fi
UTIL_LOOKUP_TOOLCHAIN_PROGS(METALLIB, metallib)
if test "x$METALLIB" = x; then
AC_MSG_CHECKING([if metallib can be run using xcrun])
METALLIB="xcrun -sdk macosx metallib"
test_metallib=`$METALLIB --version 2>&1`
if test $? -ne 0; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([XCode tool 'metallib' neither found in path nor with xcrun])
else
AC_MSG_RESULT([yes, will be using '$METALLIB'])
fi
fi
UTIL_FIXUP_EXECUTABLE(INSTALL_NAME_TOOL)
fi
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# Setup the manifest tool (MT)
UTIL_LOOKUP_TOOLCHAIN_PROGS(MT, mt)
AC_CHECK_PROG([MT], [mt$EXE_SUFFIX], [mt$EXE_SUFFIX],,, [/usr/bin/mt])
UTIL_FIXUP_EXECUTABLE(MT)
# Setup the resource compiler (RC)
UTIL_LOOKUP_TOOLCHAIN_PROGS(RC, rc)
UTIL_LOOKUP_TOOLCHAIN_PROGS(DUMPBIN, dumpbin)
AC_CHECK_PROG([RC], [rc$EXE_SUFFIX], [rc$EXE_SUFFIX],,, [/usr/bin/rc])
UTIL_FIXUP_EXECUTABLE(RC)
AC_CHECK_PROG([DUMPBIN], [dumpbin$EXE_SUFFIX], [dumpbin$EXE_SUFFIX],,,)
UTIL_FIXUP_EXECUTABLE(DUMPBIN)
# We need to check for 'msbuild.exe' because at the place where we expect to
# find 'msbuild.exe' there's also a directory called 'msbuild' and configure
# won't find the 'msbuild.exe' executable in that case (and the
# 'ac_executable_extensions' is unusable due to performance reasons).
# Notice that we intentionally don't fix up the path to MSBUILD because we
# will call it in a DOS shell during freetype detection on Windows (see
# 'LIB_SETUP_FREETYPE' in "libraries.m4")
AC_CHECK_PROG([MSBUILD], [msbuild$EXE_SUFFIX], [msbuild$EXE_SUFFIX],,,)
fi
if test "x$OPENJDK_TARGET_OS" != xwindows; then
UTIL_LOOKUP_TOOLCHAIN_PROGS(STRIP, strip)
UTIL_CHECK_TOOLS(STRIP, strip)
UTIL_FIXUP_EXECUTABLE(STRIP)
if test "x$TOOLCHAIN_TYPE" = xgcc; then
UTIL_LOOKUP_TOOLCHAIN_PROGS(NM, nm gcc-nm)
UTIL_CHECK_TOOLS(NM, nm gcc-nm)
else
UTIL_LOOKUP_TOOLCHAIN_PROGS(NM, nm)
UTIL_CHECK_TOOLS(NM, nm)
fi
UTIL_FIXUP_EXECUTABLE(NM)
GNM="$NM"
AC_SUBST(GNM)
fi
@@ -779,14 +812,25 @@ AC_DEFUN_ONCE([TOOLCHAIN_DETECT_TOOLCHAIN_EXTRA],
# objcopy is used for moving debug symbols to separate files when
# full debug symbols are enabled.
if test "x$OPENJDK_TARGET_OS" = xlinux; then
UTIL_LOOKUP_TOOLCHAIN_PROGS(OBJCOPY, gobjcopy objcopy)
UTIL_CHECK_TOOLS(OBJCOPY, [gobjcopy objcopy])
# Only call fixup if objcopy was found.
if test -n "$OBJCOPY"; then
UTIL_FIXUP_EXECUTABLE(OBJCOPY)
fi
fi
UTIL_LOOKUP_TOOLCHAIN_PROGS(OBJDUMP, gobjdump objdump)
UTIL_CHECK_TOOLS(OBJDUMP, [gobjdump objdump])
if test "x$OBJDUMP" != x; then
# Only used for compare.sh; we can live without it. UTIL_FIXUP_EXECUTABLE
# bails if argument is missing.
UTIL_FIXUP_EXECUTABLE(OBJDUMP)
fi
case $TOOLCHAIN_TYPE in
gcc|clang)
UTIL_REQUIRE_TOOLCHAIN_PROGS(CXXFILT, c++filt)
UTIL_CHECK_TOOLS(CXXFILT, [c++filt])
UTIL_CHECK_NONEMPTY(CXXFILT)
UTIL_FIXUP_EXECUTABLE(CXXFILT)
;;
esac
])
@@ -828,18 +872,14 @@ AC_DEFUN_ONCE([TOOLCHAIN_SETUP_BUILD_COMPILERS],
. $CONFIGURESUPPORT_OUTPUTDIR/build-devkit.info
# This potentially sets the following:
# A descriptive name of the devkit
BASIC_EVAL_BUILD_DEVKIT_VARIABLE([BUILD_DEVKIT_NAME])
BASIC_EVAL_DEVKIT_VARIABLE([BUILD_DEVKIT_NAME])
# Corresponds to --with-extra-path
BASIC_EVAL_BUILD_DEVKIT_VARIABLE([BUILD_DEVKIT_EXTRA_PATH])
BASIC_EVAL_DEVKIT_VARIABLE([BUILD_DEVKIT_EXTRA_PATH])
# Corresponds to --with-toolchain-path
BASIC_EVAL_BUILD_DEVKIT_VARIABLE([BUILD_DEVKIT_TOOLCHAIN_PATH])
BASIC_EVAL_DEVKIT_VARIABLE([BUILD_DEVKIT_TOOLCHAIN_PATH])
# Corresponds to --with-sysroot
BASIC_EVAL_BUILD_DEVKIT_VARIABLE([BUILD_DEVKIT_SYSROOT])
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
BASIC_EVAL_BUILD_DEVKIT_VARIABLE([BUILD_DEVKIT_VS_INCLUDE])
BASIC_EVAL_BUILD_DEVKIT_VARIABLE([BUILD_DEVKIT_VS_LIB])
fi
BASIC_EVAL_DEVKIT_VARIABLE([BUILD_DEVKIT_SYSROOT])
# Skip the Windows specific parts
fi
AC_MSG_CHECKING([for build platform devkit])
@@ -849,76 +889,36 @@ AC_DEFUN_ONCE([TOOLCHAIN_SETUP_BUILD_COMPILERS],
AC_MSG_RESULT([$BUILD_DEVKIT_ROOT])
fi
# Fallback default of just /bin if DEVKIT_PATH is not defined
BUILD_SYSROOT="$BUILD_DEVKIT_SYSROOT"
# Fallback default of just /bin if DEVKIT_PATH is not defined
if test "x$BUILD_DEVKIT_TOOLCHAIN_PATH" = x; then
BUILD_DEVKIT_TOOLCHAIN_PATH="$BUILD_DEVKIT_ROOT/bin"
fi
PATH="$BUILD_DEVKIT_TOOLCHAIN_PATH:$BUILD_DEVKIT_EXTRA_PATH"
BUILD_SYSROOT="$BUILD_DEVKIT_SYSROOT"
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# For historical reasons, paths are separated by ; in devkit.info
BUILD_VS_INCLUDE="${BUILD_DEVKIT_VS_INCLUDE//;/:}"
BUILD_VS_LIB="${BUILD_DEVKIT_VS_LIB//;/:}"
TOOLCHAIN_SETUP_VISUAL_STUDIO_SYSROOT_FLAGS(BUILD_, BUILD_)
fi
fi
else
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# If we got no devkit, we need to go hunting for the proper env
TOOLCHAIN_FIND_VISUAL_STUDIO_BAT_FILE($OPENJDK_BUILD_CPU, [$TOOLCHAIN_VERSION])
TOOLCHAIN_EXTRACT_VISUAL_STUDIO_ENV($OPENJDK_BUILD_CPU, BUILD_)
# We cannot currently export the VS_PATH to spec.gmk. This is probably
# strictly not correct, but seems to work anyway.
# Convert VS_INCLUDE and VS_LIB into sysroot flags
TOOLCHAIN_SETUP_VISUAL_STUDIO_SYSROOT_FLAGS(BUILD_)
fi
fi
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
UTIL_REQUIRE_PROGS(BUILD_CC, cl, [$VS_PATH])
UTIL_REQUIRE_PROGS(BUILD_CXX, cl, [$VS_PATH])
# On windows, the assembler is "ml.exe". We currently don't need this so
# do not require.
if test "x$OPENJDK_BUILD_CPU_BITS" = "x64"; then
# On 64 bit windows, the assembler is "ml64.exe"
UTIL_LOOKUP_PROGS(BUILD_AS, ml64, [$VS_PATH])
else
# otherwise the assembler is "ml.exe"
UTIL_LOOKUP_PROGS(BUILD_AS, ml, [$VS_PATH])
fi
# On windows, the ar tool is lib.exe (used to create static libraries).
# We currently don't need this so do not require.
UTIL_LOOKUP_PROGS(BUILD_AR, lib, [$VS_PATH])
# In the Microsoft toolchain we have a separate LD command "link".
UTIL_REQUIRE_PROGS(BUILD_LD, link, [$VS_PATH])
TOOLCHAIN_VERIFY_LINK_BINARY(BUILD_LD)
BUILD_LDCXX="$BUILD_LD"
else
if test "x$OPENJDK_BUILD_OS" = xmacosx; then
UTIL_REQUIRE_PROGS(BUILD_CC, clang cc gcc)
UTIL_REQUIRE_PROGS(BUILD_CXX, clang++ CC g++)
else
UTIL_REQUIRE_PROGS(BUILD_CC, cc gcc)
UTIL_REQUIRE_PROGS(BUILD_CXX, CC g++)
fi
UTIL_LOOKUP_PROGS(BUILD_NM, nm gcc-nm)
UTIL_LOOKUP_PROGS(BUILD_AR, ar gcc-ar lib)
UTIL_LOOKUP_PROGS(BUILD_OBJCOPY, objcopy)
UTIL_LOOKUP_PROGS(BUILD_STRIP, strip)
# Assume the C compiler is the assembler
BUILD_AS="$BUILD_CC -c"
# Just like for the target compiler, use the compiler as linker
BUILD_LD="$BUILD_CC"
BUILD_LDCXX="$BUILD_CXX"
fi
# FIXME: we should list the discovered compilers as an exclude pattern!
# If we do that, we can do this detection before POST_DETECTION, and still
# find the build compilers in the tools dir, if needed.
UTIL_REQUIRE_PROGS(BUILD_CC, [cl cc gcc])
UTIL_FIXUP_EXECUTABLE(BUILD_CC)
UTIL_REQUIRE_PROGS(BUILD_CXX, [cl CC g++])
UTIL_FIXUP_EXECUTABLE(BUILD_CXX)
UTIL_PATH_PROGS(BUILD_NM, nm gcc-nm)
UTIL_FIXUP_EXECUTABLE(BUILD_NM)
UTIL_PATH_PROGS(BUILD_AR, ar gcc-ar)
UTIL_FIXUP_EXECUTABLE(BUILD_AR)
UTIL_PATH_PROGS(BUILD_OBJCOPY, objcopy)
UTIL_FIXUP_EXECUTABLE(BUILD_OBJCOPY)
UTIL_PATH_PROGS(BUILD_STRIP, strip)
UTIL_FIXUP_EXECUTABLE(BUILD_STRIP)
# Assume the C compiler is the assembler
BUILD_AS="$BUILD_CC -c"
# Just like for the target compiler, use the compiler as linker
BUILD_LD="$BUILD_CC"
BUILD_LDCXX="$BUILD_CXX"
PATH="$OLDPATH"
@@ -959,7 +959,7 @@ AC_DEFUN_ONCE([TOOLCHAIN_MISC_CHECKS],
# Check for extra potential brokenness.
if test "x$TOOLCHAIN_TYPE" = xmicrosoft; then
# On Windows, double-check that we got the right compiler.
CC_VERSION_OUTPUT=`$CC 2>&1 1>/dev/null | $HEAD -n 1 | $TR -d '\r'`
CC_VERSION_OUTPUT=`$CC 2>&1 | $GREP -v 'ERROR.*UtilTranslatePathList' | $HEAD -n 1 | $TR -d '\r'`
COMPILER_CPU_TEST=`$ECHO $CC_VERSION_OUTPUT | $SED -n "s/^.* \(.*\)$/\1/p"`
if test "x$OPENJDK_TARGET_CPU" = "xx86"; then
if test "x$COMPILER_CPU_TEST" != "x80x86" -a "x$COMPILER_CPU_TEST" != "xx86"; then
@@ -969,10 +969,6 @@ AC_DEFUN_ONCE([TOOLCHAIN_MISC_CHECKS],
if test "x$COMPILER_CPU_TEST" != "xx64"; then
AC_MSG_ERROR([Target CPU mismatch. We are building for $OPENJDK_TARGET_CPU but CL is for "$COMPILER_CPU_TEST"; expected "x64".])
fi
elif test "x$OPENJDK_TARGET_CPU" = "xaarch64"; then
if test "x$COMPILER_CPU_TEST" != "xARM64"; then
AC_MSG_ERROR([Target CPU mismatch. We are building for $OPENJDK_TARGET_CPU but CL is for "$COMPILER_CPU_TEST"; expected "arm64".])
fi
fi
fi
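The CPU sanity check above scrapes the trailing architecture word from the cl.exe banner into COMPILER_CPU_TEST. The first banner below is the one quoted earlier in this file; the second is an assumed example of a 64-bit banner with the build number left as a placeholder.

  #   Microsoft (R) 32-bit C/C++ Optimizing Compiler Version 16.00.40219.01 for 80x86  -> 80x86
  #   Microsoft (R) C/C++ Optimizing Compiler Version 19.28.<build> for x64            -> x64
  # An x86_64 build that picks up a 32-bit cl.exe therefore fails the "x64" comparison.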
@@ -1055,7 +1051,7 @@ AC_DEFUN_ONCE([TOOLCHAIN_SETUP_JTREG],
if test "x$JT_HOME" = x; then
# JT_HOME is not set in environment, or was deemed invalid.
# Try to find jtreg on path
UTIL_LOOKUP_PROGS(JTREGEXE, jtreg)
UTIL_PATH_PROGS(JTREGEXE, jtreg)
if test "x$JTREGEXE" != x; then
# That's good, now try to derive JT_HOME
JT_HOME=`(cd $($DIRNAME $JTREGEXE)/.. && pwd)`

View File

@@ -1,699 +0,0 @@
#
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
################################################################################
# The order of these defines the priority by which we try to find them.
VALID_VS_VERSIONS="2019 2017"
VS_DESCRIPTION_2017="Microsoft Visual Studio 2017"
VS_VERSION_INTERNAL_2017=141
VS_MSVCR_2017=vcruntime140.dll
VS_MSVCP_2017=msvcp140.dll
VS_ENVVAR_2017="VS150COMNTOOLS"
VS_USE_UCRT_2017="true"
VS_VS_INSTALLDIR_2017="Microsoft Visual Studio/2017"
VS_EDITIONS_2017="BuildTools Community Professional Enterprise"
VS_SDK_INSTALLDIR_2017=
VS_VS_PLATFORM_NAME_2017="v141"
VS_SDK_PLATFORM_NAME_2017=
VS_SUPPORTED_2017=true
VS_TOOLSET_SUPPORTED_2017=true
VS_DESCRIPTION_2019="Microsoft Visual Studio 2019"
VS_VERSION_INTERNAL_2019=142
VS_MSVCR_2019=vcruntime140.dll
VS_VCRUNTIME_1_2019=vcruntime140_1.dll
VS_MSVCP_2019=msvcp140.dll
VS_ENVVAR_2019="VS160COMNTOOLS"
VS_USE_UCRT_2019="true"
VS_VS_INSTALLDIR_2019="Microsoft Visual Studio/2019"
VS_EDITIONS_2019="BuildTools Community Professional Enterprise"
VS_SDK_INSTALLDIR_2019=
VS_VS_PLATFORM_NAME_2019="v142"
VS_SDK_PLATFORM_NAME_2019=
VS_SUPPORTED_2019=true
VS_TOOLSET_SUPPORTED_2019=true
################################################################################
AC_DEFUN([TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT],
[
if test "x$VS_ENV_CMD" = x; then
TARGET_CPU="$1"
VS_VERSION="$2"
VS_BASE="$3"
METHOD="$4"
UTIL_FIXUP_PATH(VS_BASE, NOFAIL)
if test "x$VS_BASE" != x && test -d "$VS_BASE"; then
# In VS 2017 and VS 2019, the default installation is in a subdir named after the edition.
# Find the first one present and use that.
if test "x$VS_EDITIONS" != x; then
for edition in $VS_EDITIONS; do
if test -d "$VS_BASE/$edition"; then
VS_BASE="$VS_BASE/$edition"
break
fi
done
fi
AC_MSG_NOTICE([Found Visual Studio installation at $VS_BASE using $METHOD])
if test "x$TARGET_CPU" = xx86; then
VCVARSFILES="vc/bin/vcvars32.bat vc/auxiliary/build/vcvars32.bat"
elif test "x$TARGET_CPU" = xx86_64; then
VCVARSFILES="vc/bin/amd64/vcvars64.bat vc/bin/x86_amd64/vcvarsx86_amd64.bat \
vc/auxiliary/build/vcvarsx86_amd64.bat vc/auxiliary/build/vcvars64.bat"
elif test "x$TARGET_CPU" = xaarch64; then
# for host x86-64, target aarch64
VCVARSFILES="vc/auxiliary/build/vcvarsamd64_arm64.bat \
vc/auxiliary/build/vcvarsx86_arm64.bat"
fi
for VCVARSFILE in $VCVARSFILES; do
if test -f "$VS_BASE/$VCVARSFILE"; then
VS_ENV_CMD="$VS_BASE/$VCVARSFILE"
break
fi
done
if test "x$VS_ENV_CMD" = x; then
AC_MSG_NOTICE([Warning: None of $VCVARSFILES were found, Visual Studio installation not recognized. Ignoring])
else
# PLATFORM_TOOLSET is used during the compilation of the freetype sources
# (see 'LIB_BUILD_FREETYPE' in libraries.m4) and must be one of 'v100',
# 'v110' or 'v120' for VS 2010, 2012 or VS2013
eval PLATFORM_TOOLSET="\${VS_VS_PLATFORM_NAME_${VS_VERSION}}"
fi
fi
fi
])
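Assembled from the pieces of the removed macro above, the kind of value that typically ends up in VS_ENV_CMD for an x86_64 target with a default Visual Studio 2019 Community install; the drive letter and edition are assumptions.

  #   VS_BASE          = c:/program files (x86)/Microsoft Visual Studio/2019/Community
  #   VS_ENV_CMD       = $VS_BASE/vc/auxiliary/build/vcvars64.bat
  #   PLATFORM_TOOLSET = v142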
################################################################################
AC_DEFUN([TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT],
[
if test "x$VS_ENV_CMD" = x; then
TARGET_CPU="$1"
VS_VERSION="$2"
WIN_SDK_BASE="$3"
METHOD="$4"
UTIL_FIXUP_PATH(WIN_SDK_BASE, NOFAIL)
if test "x$WIN_SDK_BASE" != x && test -d "$WIN_SDK_BASE"; then
# There have been cases of partial or broken SDK installations. A missing
# lib dir is not going to work.
if test ! -d "$WIN_SDK_BASE/lib"; then
AC_MSG_NOTICE([Found Windows SDK installation at $WIN_SDK_BASE using $METHOD])
AC_MSG_NOTICE([Warning: Installation is broken, lib dir is missing. Ignoring])
elif test -f "$WIN_SDK_BASE/bin/setenv.cmd"; then
AC_MSG_NOTICE([Found Windows SDK installation at $WIN_SDK_BASE using $METHOD])
VS_ENV_CMD="$WIN_SDK_BASE/bin/setenv.cmd"
if test "x$TARGET_CPU" = xx86; then
VS_ENV_ARGS="/x86"
elif test "x$TARGET_CPU" = xx86_64; then
VS_ENV_ARGS="/x64"
elif test "x$TARGET_CPU" = xaarch64; then
VS_ENV_ARGS="/arm64"
fi
# PLATFORM_TOOLSET is used during the compilation of the freetype sources (see
# 'LIB_BUILD_FREETYPE' in libraries.m4) and must be 'Windows7.1SDK' for Windows7.1SDK
# TODO: improve detection for other versions of SDK
eval PLATFORM_TOOLSET="\${VS_SDK_PLATFORM_NAME_${VS_VERSION}}"
else
AC_MSG_NOTICE([Found Windows SDK installation at $WIN_SDK_BASE using $METHOD])
AC_MSG_NOTICE([Warning: Installation is broken, SetEnv.Cmd is missing. Ignoring])
fi
fi
fi
])
################################################################################
# Finds the bat or cmd file in Visual Studio or the SDK that sets up a proper
# build environment and assigns it to VS_ENV_CMD
AC_DEFUN([TOOLCHAIN_FIND_VISUAL_STUDIO_BAT_FILE],
[
# VS2017 provides the option to install previous minor versions of the MSVC
# toolsets. It is not possible to directly download earlier minor versions of
# VS2017 and in order to build with a previous minor compiler toolset version,
# it is now possible to compile with earlier minor versions by passing
# -vcvars_ver=<toolset_version> argument to vcvarsall.bat.
AC_ARG_WITH(msvc-toolset-version, [AS_HELP_STRING([--with-msvc-toolset-version],
[specific MSVC toolset version to use, passed as -vcvars_ver argument to
pass to vcvarsall.bat (Windows only)])])
TARGET_CPU="$1"
VS_VERSION="$2"
eval VS_COMNTOOLS_VAR="\${VS_ENVVAR_${VS_VERSION}}"
eval VS_COMNTOOLS="\$${VS_COMNTOOLS_VAR}"
eval VS_INSTALL_DIR="\${VS_VS_INSTALLDIR_${VS_VERSION}}"
eval VS_EDITIONS="\${VS_EDITIONS_${VS_VERSION}}"
eval SDK_INSTALL_DIR="\${VS_SDK_INSTALLDIR_${VS_VERSION}}"
eval VS_ENV_ARGS="\${VS_ENV_ARGS_${VS_VERSION}}"
eval VS_TOOLSET_SUPPORTED="\${VS_TOOLSET_SUPPORTED_${VS_VERSION}}"
VS_ENV_CMD=""
# When using --with-tools-dir, assume it points to the correct and default
# version of Visual Studio or that --with-toolchain-version was also set.
if test "x$with_tools_dir" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$TARGET_CPU], [$VS_VERSION],
[$with_tools_dir/../..], [--with-tools-dir])
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$TARGET_CPU], [$VS_VERSION],
[$with_tools_dir/../../..], [--with-tools-dir])
if test "x$VS_ENV_CMD" = x; then
# Having specified an argument which is incorrect will produce an instant failure;
# we should not go on looking
AC_MSG_NOTICE([The path given by --with-tools-dir does not contain a valid])
AC_MSG_NOTICE([Visual Studio installation. Please point to the VC/bin or VC/bin/amd64])
AC_MSG_NOTICE([directory within the Visual Studio installation])
AC_MSG_ERROR([Cannot locate a valid Visual Studio installation])
fi
fi
if test "x$VS_COMNTOOLS" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$TARGET_CPU], [$VS_VERSION],
[$VS_COMNTOOLS/../..], [$VS_COMNTOOLS_VAR variable])
fi
if test "x$PROGRAMFILES" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$TARGET_CPU], [$VS_VERSION],
[$PROGRAMFILES/$VS_INSTALL_DIR], [well-known name])
fi
# Work around the insanely named ProgramFiles(x86) env variable
PROGRAMFILES_X86="`env | $SED -n 's/^ProgramFiles(x86)=//p'`"
if test "x$PROGRAMFILES_X86" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$TARGET_CPU], [$VS_VERSION],
[$PROGRAMFILES_X86/$VS_INSTALL_DIR], [well-known name])
fi
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$TARGET_CPU], [$VS_VERSION],
[c:/program files/$VS_INSTALL_DIR], [well-known name])
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([$TARGET_CPU], [$VS_VERSION],
[c:/program files (x86)/$VS_INSTALL_DIR], [well-known name])
if test "x$SDK_INSTALL_DIR" != x; then
if test "x$ProgramW6432" != x; then
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([$TARGET_CPU], [$VS_VERSION],
[$ProgramW6432/$SDK_INSTALL_DIR], [well-known name])
fi
if test "x$PROGRAMW6432" != x; then
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([$TARGET_CPU], [$VS_VERSION],
[$PROGRAMW6432/$SDK_INSTALL_DIR], [well-known name])
fi
if test "x$PROGRAMFILES" != x; then
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([$TARGET_CPU], [$VS_VERSION],
[$PROGRAMFILES/$SDK_INSTALL_DIR], [well-known name])
fi
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([$TARGET_CPU], [$VS_VERSION],
[c:/program files/$SDK_INSTALL_DIR], [well-known name])
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([$TARGET_CPU], [$VS_VERSION],
[c:/program files (x86)/$SDK_INSTALL_DIR], [well-known name])
fi
VCVARS_VER=auto
if test "x$VS_TOOLSET_SUPPORTED" != x; then
if test "x$with_msvc_toolset_version" != x; then
VCVARS_VER="$with_msvc_toolset_version"
fi
fi
])
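# Illustrative usage only (not part of the macro): pinning an older MSVC toolset
# when configuring with VS 2019. The toolset number below is an assumption and
# depends on what is installed locally.
#   bash configure --with-toolchain-version=2019 --with-msvc-toolset-version=14.16
# With that flag, the extraction step later passes the equivalent of
# -vcvars_ver=14.16 on to vcvarsall.bat via the extract-vs-env.cmd wrapper.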
################################################################################
AC_DEFUN([TOOLCHAIN_FIND_VISUAL_STUDIO],
[
AC_ARG_WITH(toolchain-version, [AS_HELP_STRING([--with-toolchain-version],
[the version of the toolchain to look for, use '--help' to show possible values @<:@platform dependent@:>@])])
if test "x$with_toolchain_version" = xlist; then
# List all toolchains
AC_MSG_NOTICE([The following toolchain versions are valid on this platform:])
for version in $VALID_VS_VERSIONS; do
eval VS_DESCRIPTION=\${VS_DESCRIPTION_$version}
$PRINTF " %-10s %s\n" $version "$VS_DESCRIPTION"
done
exit 0
elif test "x$DEVKIT_VS_VERSION" != x; then
VS_VERSION=$DEVKIT_VS_VERSION
TOOLCHAIN_VERSION=$VS_VERSION
# If the devkit has a name, use that as description
VS_DESCRIPTION="$DEVKIT_NAME"
if test "x$VS_DESCRIPTION" = x; then
eval VS_DESCRIPTION="\${VS_DESCRIPTION_${VS_VERSION}}"
fi
eval VS_VERSION_INTERNAL="\${VS_VERSION_INTERNAL_${VS_VERSION}}"
eval MSVCR_NAME="\${VS_MSVCR_${VS_VERSION}}"
eval VCRUNTIME_1_NAME="\${VS_VCRUNTIME_1_${VS_VERSION}}"
eval MSVCP_NAME="\${VS_MSVCP_${VS_VERSION}}"
eval USE_UCRT="\${VS_USE_UCRT_${VS_VERSION}}"
eval VS_SUPPORTED="\${VS_SUPPORTED_${VS_VERSION}}"
eval PLATFORM_TOOLSET="\${VS_VS_PLATFORM_NAME_${VS_VERSION}}"
# For historical reasons, paths are separated by ; in devkit.info
VS_INCLUDE=${DEVKIT_VS_INCLUDE//;/:}
VS_LIB=${DEVKIT_VS_LIB//;/:}
AC_MSG_NOTICE([Found devkit $VS_DESCRIPTION])
elif test "x$with_toolchain_version" != x; then
# User override; check that it is valid
if test "x${VALID_VS_VERSIONS/$with_toolchain_version/}" = "x${VALID_VS_VERSIONS}"; then
AC_MSG_NOTICE([Visual Studio version $with_toolchain_version is not valid.])
AC_MSG_NOTICE([Valid Visual Studio versions: $VALID_VS_VERSIONS.])
AC_MSG_ERROR([Cannot continue.])
fi
VS_VERSIONS_PROBE_LIST="$with_toolchain_version"
else
# No flag given, use default
VS_VERSIONS_PROBE_LIST="$VALID_VS_VERSIONS"
fi
for VS_VERSION in $VS_VERSIONS_PROBE_LIST; do
TOOLCHAIN_FIND_VISUAL_STUDIO_BAT_FILE($OPENJDK_TARGET_CPU, [$VS_VERSION])
if test "x$VS_ENV_CMD" != x; then
TOOLCHAIN_VERSION=$VS_VERSION
eval VS_DESCRIPTION="\${VS_DESCRIPTION_${VS_VERSION}}"
eval VS_VERSION_INTERNAL="\${VS_VERSION_INTERNAL_${VS_VERSION}}"
eval MSVCR_NAME="\${VS_MSVCR_${VS_VERSION}}"
eval VCRUNTIME_1_NAME="\${VS_VCRUNTIME_1_${VS_VERSION}}"
eval MSVCP_NAME="\${VS_MSVCP_${VS_VERSION}}"
eval USE_UCRT="\${VS_USE_UCRT_${VS_VERSION}}"
eval VS_SUPPORTED="\${VS_SUPPORTED_${VS_VERSION}}"
# The rest of the variables are already evaled while probing
AC_MSG_NOTICE([Found $VS_DESCRIPTION])
break
fi
done
TOOLCHAIN_DESCRIPTION="$VS_DESCRIPTION"
if test "x$VS_SUPPORTED" = "xfalse"; then
UNSUPPORTED_TOOLCHAIN_VERSION=yes
fi
])
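# Worked example (hypothetical devkit paths) of the separator rewrite in the
# devkit branch above: devkit.info stores include/lib lists separated by ';',
#   DEVKIT_VS_INCLUDE="/devkit/VC/include;/devkit/ucrt/include"
# and the substitution ${DEVKIT_VS_INCLUDE//;/:} turns it into
#   VS_INCLUDE="/devkit/VC/include:/devkit/ucrt/include"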
AC_DEFUN([TOOLCHAIN_EXTRACT_VISUAL_STUDIO_ENV],
[
TARGET_CPU=$1
AC_MSG_NOTICE([Trying to extract Visual Studio environment variables for $TARGET_CPU])
AC_MSG_NOTICE([using $VS_ENV_CMD $VS_ENV_ARGS])
VS_ENV_TMP_DIR="$CONFIGURESUPPORT_OUTPUTDIR/vs-env-$TARGET_CPU"
$MKDIR -p $VS_ENV_TMP_DIR
# Cannot use the VS10 setup script directly (since it only updates the DOS subshell environment).
# Instead create a shell script which will set the relevant variables when run.
OLDPATH="$PATH"
# Make sure we only capture additions to PATH needed by VS.
# Clear out PATH, but keep the system dir so the vsvars cmd file can run.
export PATH=$WINENV_PREFIX/c/windows/system32
# The "| cat" is to stop SetEnv.Cmd to mess with system colors on some systems
# We can't pass -vcvars_ver=$VCVARS_VER here because cmd.exe eats all '='
# in bat file arguments. :-(
$FIXPATH $CMD /c "$TOPDIR/make/scripts/extract-vs-env.cmd" "$VS_ENV_CMD" \
"$VS_ENV_TMP_DIR/set-vs-env.sh" $VCVARS_VER $VS_ENV_ARGS \
> $VS_ENV_TMP_DIR/extract-vs-env.log | $CAT 2>&1
PATH="$OLDPATH"
if test ! -s $VS_ENV_TMP_DIR/set-vs-env.sh; then
AC_MSG_NOTICE([Could not successfully extract the environment variables needed for the VS setup.])
AC_MSG_NOTICE([Try setting --with-tools-dir to the VC/bin directory within the VS installation.])
AC_MSG_NOTICE([To analyze the problem, see extract-vs-env.log and extract-vs-env.bat in])
AC_MSG_NOTICE([$VS_ENV_TMP_DIR.])
AC_MSG_ERROR([Cannot continue])
fi
# Remove windows line endings
$SED -i -e 's|\r||g' $VS_ENV_TMP_DIR/set-vs-env.sh
# Now set all paths and other env variables by executing the generated
# shell script. This will allow the rest of the configure script to find
# and run the compiler in the proper way.
AC_MSG_NOTICE([Setting extracted environment variables for $TARGET_CPU])
. $VS_ENV_TMP_DIR/set-vs-env.sh
# Extract only what VS_ENV_CMD added to the PATH
VS_PATH=${PATH_AFTER/"$PATH_BEFORE"}
VS_PATH=${VS_PATH//::/:}
# Remove any paths containing # (typically F#) as that messes up make. This
# is needed if visual studio was installed with F# support.
[ VS_PATH=`$ECHO "$VS_PATH" | $SED 's/[^:#]*#[^:]*://g'` ]
# Sometimes case is off
if test -z "$WINDOWSSDKDIR"; then
WINDOWSSDKDIR="$WindowsSdkDir"
fi
# Now we have VS_PATH, VS_INCLUDE, VS_LIB. For further checking, we
# also define VCINSTALLDIR and WINDOWSSDKDIR. All are in
# unix style.
])
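# Sketch of the PATH delta computed above, using made-up directories:
#   PATH_BEFORE="/usr/bin:/bin"
#   PATH_AFTER="/cygdrive/c/vs/vc/bin:/cygdrive/c/vs/f#/tools:/usr/bin:/bin"
# ${PATH_AFTER/"$PATH_BEFORE"} strips the pre-existing part, and the sed that
# drops elements containing '#' then leaves
#   VS_PATH="/cygdrive/c/vs/vc/bin:"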
################################################################################
# Check if the VS env variables were setup prior to running configure.
# If not, then find vcvarsall.bat and run it automatically, and integrate
# the set env variables into the spec file.
AC_DEFUN([TOOLCHAIN_SETUP_VISUAL_STUDIO_ENV],
[
# Locate the vsvars bat file and save it as VS_ENV_CMD
TOOLCHAIN_FIND_VISUAL_STUDIO
# If we have a devkit, we don't need to run VS_ENV_CMD
if test "x$DEVKIT_VS_VERSION" = x; then
if test "x$VS_ENV_CMD" != x; then
# We have found a Visual Studio environment on disk, let's extract variables
# from the vsvars bat file into shell variables in the configure script.
TOOLCHAIN_EXTRACT_VISUAL_STUDIO_ENV($OPENJDK_TARGET_CPU)
# Now we have VS_PATH, VS_INCLUDE, VS_LIB. For further checking, we
# also define VCINSTALLDIR and WINDOWSSDKDIR. All are in
# unix style.
else
# We did not find a vsvars bat file.
AC_MSG_ERROR([Cannot locate a valid Visual Studio installation])
fi
fi
# At this point, we should have correct variables in the environment
AC_MSG_CHECKING([that Visual Studio variables have been correctly extracted])
if test "x$VCINSTALLDIR" != x || test "x$WINDOWSSDKDIR" != x \
|| test "x$DEVKIT_NAME" != x; then
if test "x$VS_INCLUDE" = x || test "x$VS_LIB" = x; then
AC_MSG_RESULT([no; Visual Studio present but broken])
AC_MSG_ERROR([Your VC command prompt seems broken, INCLUDE and/or LIB is missing.])
else
AC_MSG_RESULT([ok])
# Turn VS_PATH into TOOLCHAIN_PATH
TOOLCHAIN_PATH="$TOOLCHAIN_PATH:$VS_PATH"
# Convert VS_INCLUDE and VS_LIB into sysroot flags
TOOLCHAIN_SETUP_VISUAL_STUDIO_SYSROOT_FLAGS
fi
else
AC_MSG_RESULT([not found])
if test "x$VS_ENV_CMD" = x; then
AC_MSG_NOTICE([Cannot locate a valid Visual Studio or Windows SDK installation on disk])
else
AC_MSG_NOTICE([Running the extraction script failed])
fi
AC_MSG_NOTICE([Try setting --with-tools-dir to the VC/bin directory within the VS installation.])
AC_MSG_NOTICE([To analyze the problem, see extract-vs-env.log and extract-vs-env.bat in])
AC_MSG_NOTICE([$VS_ENV_TMP_DIR.])
AC_MSG_ERROR([Cannot continue])
fi
])
AC_DEFUN([TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL],
[
DLL_NAME="$1"
POSSIBLE_MSVC_DLL="$2"
METHOD="$3"
if test -n "$POSSIBLE_MSVC_DLL" -a -e "$POSSIBLE_MSVC_DLL"; then
AC_MSG_NOTICE([Found $DLL_NAME at $POSSIBLE_MSVC_DLL using $METHOD])
# Need to check if the found msvcr is correct architecture
AC_MSG_CHECKING([found $DLL_NAME architecture])
MSVC_DLL_FILETYPE=`$FILE -b "$POSSIBLE_MSVC_DLL"`
if test "x$OPENJDK_TARGET_CPU" = xx86; then
CORRECT_MSVCR_ARCH=386
elif test "x$OPENJDK_TARGET_CPU" = xx86_64; then
CORRECT_MSVCR_ARCH=x86-64
elif test "x$OPENJDK_TARGET_CPU" = xaarch64; then
# The cygwin 'file' command only returns "PE32+ executable (DLL) (console), for MS Windows",
# without specifying which architecture the DLL targets. This has been fixed upstream.
# https://github.com/file/file/commit/b849b1af098ddd530094bf779b58431395db2e10#diff-ff2eced09e6860de75057dd731d092aeR142
CORRECT_MSVCR_ARCH="PE32+ executable"
fi
if $ECHO "$MSVC_DLL_FILETYPE" | $GREP "$CORRECT_MSVCR_ARCH" 2>&1 > /dev/null; then
AC_MSG_RESULT([ok])
MSVC_DLL="$POSSIBLE_MSVC_DLL"
AC_MSG_CHECKING([for $DLL_NAME])
AC_MSG_RESULT([$MSVC_DLL])
else
AC_MSG_RESULT([incorrect, ignoring])
AC_MSG_NOTICE([The file type of the located $DLL_NAME is $MSVC_DLL_FILETYPE])
fi
fi
])
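# For reference, a rough sketch of what the architecture check above matches on;
# the exact 'file' output varies between file versions and platforms:
#   $ file -b vcruntime140.dll
#   PE32+ executable (DLL) (console) x86-64, for MS Windows
# which contains CORRECT_MSVCR_ARCH=x86-64 for an x86_64 target, so the DLL is accepted.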
AC_DEFUN([TOOLCHAIN_SETUP_MSVC_DLL],
[
DLL_NAME="$1"
MSVC_DLL=
if test "x$OPENJDK_TARGET_CPU" = xx86; then
vs_target_cpu=x86
elif test "x$OPENJDK_TARGET_CPU" = xx86_64; then
vs_target_cpu=x64
elif test "x$OPENJDK_TARGET_CPU" = xaarch64; then
vs_target_cpu=arm64
fi
if test "x$MSVC_DLL" = x; then
if test "x$VCINSTALLDIR" != x; then
if test "$VS_VERSION" -lt 2017; then
# Probe: Using well-known location from Visual Studio 12.0 and older
POSSIBLE_MSVC_DLL="$VCINSTALLDIR/redist/$vs_target_cpu/microsoft.vc${VS_VERSION_INTERNAL}.crt/$DLL_NAME"
else
# Probe: Using well-known location from VS 2017 and VS 2019
POSSIBLE_MSVC_DLL="`ls $VCToolsRedistDir/$vs_target_cpu/microsoft.vc${VS_VERSION_INTERNAL}.crt/$DLL_NAME 2> /dev/null`"
fi
# In case any of the above finds more than one file, loop over them.
for possible_msvc_dll in $POSSIBLE_MSVC_DLL; do
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$possible_msvc_dll],
[well-known location in VCINSTALLDIR])
done
fi
fi
if test "x$MSVC_DLL" = x; then
# Probe: Check in the Boot JDK directory.
POSSIBLE_MSVC_DLL="$BOOT_JDK/bin/$DLL_NAME"
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[well-known location in Boot JDK])
fi
if test "x$MSVC_DLL" = x; then
# Probe: Look in the Windows system32 directory
WIN_SYSTEMROOT="$SYSTEMROOT"
UTIL_FIXUP_PATH(WIN_SYSTEMROOT, NOFAIL)
if test "x$WIN_SYSTEMROOT" != x; then
POSSIBLE_MSVC_DLL="$WIN_SYSTEMROOT/system32/$DLL_NAME"
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[well-known location in SYSTEMROOT])
fi
fi
if test "x$MSVC_DLL" = x; then
# Probe: If Visual Studio Express is installed, there is usually one with the debugger
if test "x$VS100COMNTOOLS" != x; then
WIN_VS_TOOLS_DIR="$VS100COMNTOOLS/.."
UTIL_FIXUP_PATH(WIN_VS_TOOLS_DIR, NOFAIL)
if test "x$WIN_VS_TOOLS_DIR" != x; then
POSSIBLE_MSVC_DLL=`$FIND "$WIN_VS_TOOLS_DIR" -name $DLL_NAME \
| $GREP -i /$vs_target_cpu/ | $HEAD --lines 1`
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[search of VS100COMNTOOLS])
fi
fi
fi
if test "x$MSVC_DLL" = x; then
# Probe: Search wildly in the VCINSTALLDIR. We've probably lost by now.
# (This was the original behaviour; kept since it might turn something up)
if test "x$VCINSTALLDIR" != x; then
if test "x$OPENJDK_TARGET_CPU" = xx86; then
POSSIBLE_MSVC_DLL=`$FIND "$VCINSTALLDIR" -name $DLL_NAME \
| $GREP x86 | $GREP -v ia64 | $GREP -v x64 | $GREP -v arm64 | $HEAD --lines 1`
if test "x$POSSIBLE_MSVC_DLL" = x; then
# We're grasping at straws now...
POSSIBLE_MSVC_DLL=`$FIND "$VCINSTALLDIR" -name $DLL_NAME \
| $HEAD --lines 1`
fi
else
POSSIBLE_MSVC_DLL=`$FIND "$VCINSTALLDIR" -name $DLL_NAME \
| $GREP x64 | $HEAD --lines 1`
fi
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[search of VCINSTALLDIR])
fi
fi
if test "x$MSVC_DLL" = x; then
AC_MSG_CHECKING([for $DLL_NAME])
AC_MSG_RESULT([no])
AC_MSG_ERROR([Could not find $DLL_NAME. Please specify using --with-msvcr-dll.])
fi
])
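# Typical shape of the first probe above for VS 2019 on x64 (illustrative path;
# the exact redist directory contents depend on the installed toolset):
#   $VCToolsRedistDir/x64/microsoft.vc142.crt/vcruntime140.dll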
AC_DEFUN([TOOLCHAIN_SETUP_VS_RUNTIME_DLLS],
[
AC_ARG_WITH(msvcr-dll, [AS_HELP_STRING([--with-msvcr-dll],
[path to microsoft C runtime dll (msvcr*.dll) (Windows only) @<:@probed@:>@])])
if test "x$with_msvcr_dll" != x; then
# If given explicitly by user, do not probe. If not present, fail directly.
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCR_NAME, [$with_msvcr_dll], [--with-msvcr-dll])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCR_NAME as specified by --with-msvcr-dll])
fi
MSVCR_DLL="$MSVC_DLL"
elif test "x$DEVKIT_MSVCR_DLL" != x; then
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCR_NAME, [$DEVKIT_MSVCR_DLL], [devkit])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCR_NAME as specified by devkit])
fi
MSVCR_DLL="$MSVC_DLL"
else
TOOLCHAIN_SETUP_MSVC_DLL([${MSVCR_NAME}])
MSVCR_DLL="$MSVC_DLL"
fi
AC_SUBST(MSVCR_DLL)
AC_ARG_WITH(msvcp-dll, [AS_HELP_STRING([--with-msvcp-dll],
[path to microsoft C++ runtime dll (msvcp*.dll) (Windows only) @<:@probed@:>@])])
if test "x$MSVCP_NAME" != "x"; then
if test "x$with_msvcp_dll" != x; then
# If given explicitly by user, do not probe. If not present, fail directly.
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCP_NAME, [$with_msvcp_dll], [--with-msvcp-dll])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCP_NAME as specified by --with-msvcp-dll])
fi
MSVCP_DLL="$MSVC_DLL"
elif test "x$DEVKIT_MSVCP_DLL" != x; then
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCP_NAME, [$DEVKIT_MSVCP_DLL], [devkit])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCP_NAME as specified by devkit])
fi
MSVCP_DLL="$MSVC_DLL"
else
TOOLCHAIN_SETUP_MSVC_DLL([${MSVCP_NAME}])
MSVCP_DLL="$MSVC_DLL"
fi
AC_SUBST(MSVCP_DLL)
fi
AC_ARG_WITH(vcruntime-1-dll, [AS_HELP_STRING([--with-vcruntime-1-dll],
[path to microsoft C++ runtime dll (vcruntime*_1.dll) (Windows x64 only) @<:@probed@:>@])])
if test "x$VCRUNTIME_1_NAME" != "x" && test "x$OPENJDK_TARGET_CPU" = xx86_64; then
if test "x$with_vcruntime_1_dll" != x; then
# If given explicitly by user, do not probe. If not present, fail directly.
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($VCRUNTIME_1_NAME, [$with_vcruntime_1_dll],
[--with-vcruntime-1-dll])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $VCRUNTIME_1_NAME as specified by --with-vcruntime-1-dll])
fi
VCRUNTIME_1_DLL="$MSVC_DLL"
elif test "x$DEVKIT_VCRUNTIME_1_DLL" != x; then
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($VCRUNTIME_1_NAME, [$DEVKIT_VCRUNTIME_1_DLL], [devkit])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $VCRUNTIME_1_NAME as specified by devkit])
fi
VCRUNTIME_1_DLL="$MSVC_DLL"
else
TOOLCHAIN_SETUP_MSVC_DLL([${VCRUNTIME_1_NAME}])
VCRUNTIME_1_DLL="$MSVC_DLL"
fi
fi
AC_SUBST(VCRUNTIME_1_DLL)
AC_ARG_WITH(ucrt-dll-dir, [AS_HELP_STRING([--with-ucrt-dll-dir],
[path to Microsoft Windows Kit UCRT DLL dir (Windows only) @<:@probed@:>@])])
if test "x$USE_UCRT" = "xtrue" && test "x$OPENJDK_TARGET_CPU" != xaarch64; then
AC_MSG_CHECKING([for UCRT DLL dir])
if test "x$with_ucrt_dll_dir" != x; then
if test -z "$(ls -d "$with_ucrt_dll_dir/"*.dll 2> /dev/null)"; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([Could not find any dlls in $with_ucrt_dll_dir])
else
AC_MSG_RESULT([$with_ucrt_dll_dir])
UCRT_DLL_DIR="$with_ucrt_dll_dir"
UTIL_FIXUP_PATH([UCRT_DLL_DIR])
fi
elif test "x$DEVKIT_UCRT_DLL_DIR" != "x"; then
UCRT_DLL_DIR="$DEVKIT_UCRT_DLL_DIR"
AC_MSG_RESULT($UCRT_DLL_DIR)
else
dll_subdir=$OPENJDK_TARGET_CPU
if test "x$dll_subdir" = "xaarch64"; then
dll_subdir="arm64"
elif test "x$dll_subdir" = "xx86_64"; then
dll_subdir="x64"
fi
UCRT_DLL_DIR="$WINDOWSSDKDIR/redist/ucrt/dlls/$dll_subdir"
if test -z "$(ls -d "$UCRT_DLL_DIR/"*.dll 2> /dev/null)"; then
# Try with version subdir
UCRT_DLL_DIR="`ls -d $WINDOWSSDKDIR/redist/*/ucrt/dlls/$dll_subdir \
2> /dev/null | $SORT -d | $HEAD -n1`"
if test -z "$UCRT_DLL_DIR" \
|| test -z "$(ls -d "$UCRT_DLL_DIR/"*.dll 2> /dev/null)"; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([Could not find any dlls in $UCRT_DLL_DIR])
else
AC_MSG_RESULT($UCRT_DLL_DIR)
fi
else
AC_MSG_RESULT($UCRT_DLL_DIR)
fi
fi
else
UCRT_DLL_DIR=
fi
AC_SUBST(UCRT_DLL_DIR)
])
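# Illustrative overrides for the probes above (all paths are placeholders):
#   bash configure \
#       --with-msvcr-dll=/cygdrive/c/temp/vcruntime140.dll \
#       --with-msvcp-dll=/cygdrive/c/temp/msvcp140.dll \
#       --with-ucrt-dll-dir="/cygdrive/c/kits/10/redist/ucrt/dlls/x64"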
# Setup the sysroot flags and add them to global CFLAGS and LDFLAGS so
# that configure can use them while detecting compilers.
# TOOLCHAIN_TYPE is available here.
# Param 1 - Optional prefix to SYSROOT variables. (e.g BUILD_)
# Param 2 - Optional prefix to VS variables. (e.g BUILD_)
AC_DEFUN([TOOLCHAIN_SETUP_VISUAL_STUDIO_SYSROOT_FLAGS],
[
OLDIFS="$IFS"
IFS=":"
# Convert VS_INCLUDE into SYSROOT_CFLAGS
for ipath in [$]$2VS_INCLUDE; do
$1SYSROOT_CFLAGS="[$]$1SYSROOT_CFLAGS -I$ipath"
done
# Convert VS_LIB into SYSROOT_LDFLAGS
for libpath in [$]$2VS_LIB; do
$1SYSROOT_LDFLAGS="[$]$1SYSROOT_LDFLAGS -libpath:$libpath"
done
IFS="$OLDIFS"
AC_SUBST($1SYSROOT_CFLAGS)
AC_SUBST($1SYSROOT_LDFLAGS)
])
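# Example of the conversion above, with two hypothetical entries per list:
#   VS_INCLUDE="/vs/include:/kits/ucrt/include"
#   VS_LIB="/vs/lib/x64:/kits/um/x64"
# yields (apart from a leading space)
#   SYSROOT_CFLAGS="-I/vs/include -I/kits/ucrt/include"
#   SYSROOT_LDFLAGS="-libpath:/vs/lib/x64 -libpath:/kits/um/x64"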


@@ -0,0 +1,876 @@
#
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
################################################################################
# The order of these defines the priority by which we try to find them.
VALID_VS_VERSIONS="2019 2017 2013 2015 2012 2010"
VS_DESCRIPTION_2010="Microsoft Visual Studio 2010"
VS_VERSION_INTERNAL_2010=100
VS_MSVCR_2010=msvcr100.dll
# We don't use msvcp on Visual Studio 2010
#VS_MSVCP_2010=msvcp100.dll
VS_ENVVAR_2010="VS100COMNTOOLS"
VS_VS_INSTALLDIR_2010="Microsoft Visual Studio 10.0"
VS_SDK_INSTALLDIR_2010="Microsoft SDKs/Windows/v7.1"
VS_VS_PLATFORM_NAME_2010="v100"
VS_SDK_PLATFORM_NAME_2010="Windows7.1SDK"
VS_SUPPORTED_2010=false
VS_DESCRIPTION_2012="Microsoft Visual Studio 2012"
VS_VERSION_INTERNAL_2012=110
VS_MSVCR_2012=msvcr110.dll
VS_MSVCP_2012=msvcp110.dll
VS_ENVVAR_2012="VS110COMNTOOLS"
VS_VS_INSTALLDIR_2012="Microsoft Visual Studio 11.0"
VS_SDK_INSTALLDIR_2012=
VS_VS_PLATFORM_NAME_2012="v110"
VS_SDK_PLATFORM_NAME_2012=
VS_SUPPORTED_2012=false
VS_DESCRIPTION_2013="Microsoft Visual Studio 2013"
VS_VERSION_INTERNAL_2013=120
VS_MSVCR_2013=msvcr120.dll
VS_MSVCP_2013=msvcp120.dll
VS_ENVVAR_2013="VS120COMNTOOLS"
VS_VS_INSTALLDIR_2013="Microsoft Visual Studio 12.0"
VS_SDK_INSTALLDIR_2013=
VS_VS_PLATFORM_NAME_2013="v120"
VS_SDK_PLATFORM_NAME_2013=
VS_SUPPORTED_2013=false
VS_DESCRIPTION_2015="Microsoft Visual Studio 2015"
VS_VERSION_INTERNAL_2015=140
VS_MSVCR_2015=vcruntime140.dll
VS_MSVCP_2015=msvcp140.dll
VS_ENVVAR_2015="VS140COMNTOOLS"
VS_VS_INSTALLDIR_2015="Microsoft Visual Studio 14.0"
VS_SDK_INSTALLDIR_2015=
VS_VS_PLATFORM_NAME_2015="v140"
VS_SDK_PLATFORM_NAME_2015=
# The vcvars of 2015 breaks if 2017 is also installed. Work around this by
# explicitly specifying Windows Kit 8.1 to be used.
VS_ENV_ARGS_2015="8.1"
VS_SUPPORTED_2015=false
VS_DESCRIPTION_2017="Microsoft Visual Studio 2017"
VS_VERSION_INTERNAL_2017=141
VS_MSVCR_2017=vcruntime140.dll
VS_MSVCP_2017=msvcp140.dll
VS_ENVVAR_2017="VS150COMNTOOLS"
VS_USE_UCRT_2017="true"
VS_VS_INSTALLDIR_2017="Microsoft Visual Studio/2017"
VS_EDITIONS_2017="BuildTools Community Professional Enterprise"
VS_SDK_INSTALLDIR_2017=
VS_VS_PLATFORM_NAME_2017="v141"
VS_SDK_PLATFORM_NAME_2017=
VS_SUPPORTED_2017=true
VS_TOOLSET_SUPPORTED_2017=true
VS_DESCRIPTION_2019="Microsoft Visual Studio 2019"
VS_VERSION_INTERNAL_2019=142
VS_MSVCR_2019=vcruntime140.dll
VS_VCRUNTIME_1_2019=vcruntime140_1.dll
VS_MSVCP_2019=msvcp140.dll
VS_ENVVAR_2019="VS160COMNTOOLS"
VS_USE_UCRT_2019="true"
VS_VS_INSTALLDIR_2019="Microsoft Visual Studio/2019"
VS_EDITIONS_2019="BuildTools Community Professional Enterprise"
VS_SDK_INSTALLDIR_2019=
VS_VS_PLATFORM_NAME_2019="v142"
VS_SDK_PLATFORM_NAME_2019=
VS_SUPPORTED_2019=true
VS_TOOLSET_SUPPORTED_2019=true
################################################################################
AC_DEFUN([TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT],
[
if test "x$VS_ENV_CMD" = x; then
VS_VERSION="$1"
VS_BASE="$2"
METHOD="$3"
UTIL_REWRITE_AS_UNIX_PATH(VS_BASE)
# In VS 2017 and VS 2019, the default installation is in a subdir named after the edition.
# Find the first one present and use that.
if test "x$VS_EDITIONS" != x; then
for edition in $VS_EDITIONS; do
if test -d "$VS_BASE/$edition"; then
VS_BASE="$VS_BASE/$edition"
break
fi
done
fi
if test -d "$VS_BASE"; then
AC_MSG_NOTICE([Found Visual Studio installation at $VS_BASE using $METHOD])
if test "x$OPENJDK_TARGET_CPU_BITS" = x32; then
VCVARSFILES="vc/bin/vcvars32.bat vc/auxiliary/build/vcvars32.bat"
else
VCVARSFILES="vc/bin/amd64/vcvars64.bat vc/bin/x86_amd64/vcvarsx86_amd64.bat \
VC/Auxiliary/Build/vcvarsx86_amd64.bat VC/Auxiliary/Build/vcvars64.bat"
fi
for VCVARSFILE in $VCVARSFILES; do
if test -f "$VS_BASE/$VCVARSFILE"; then
VS_ENV_CMD="$VS_BASE/$VCVARSFILE"
break
fi
done
if test "x$VS_ENV_CMD" = x; then
AC_MSG_NOTICE([Warning: None of $VCVARSFILES were found, Visual Studio installation not recognized. Ignoring])
else
# PLATFORM_TOOLSET is used during the compilation of the freetype sources
# (see 'LIB_BUILD_FREETYPE' in libraries.m4) and must be one of 'v100',
# 'v110' or 'v120' for VS 2010, 2012 or VS2013
eval PLATFORM_TOOLSET="\${VS_VS_PLATFORM_NAME_${VS_VERSION}}"
fi
fi
fi
])
################################################################################
AC_DEFUN([TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT],
[
if test "x$VS_ENV_CMD" = x; then
VS_VERSION="$1"
WIN_SDK_BASE="$2"
METHOD="$3"
UTIL_REWRITE_AS_UNIX_PATH(WIN_SDK_BASE)
if test -d "$WIN_SDK_BASE"; then
# There have been cases of partial or broken SDK installations. A missing
# lib dir is not going to work.
if test ! -d "$WIN_SDK_BASE/lib"; then
AC_MSG_NOTICE([Found Windows SDK installation at $WIN_SDK_BASE using $METHOD])
AC_MSG_NOTICE([Warning: Installation is broken, lib dir is missing. Ignoring])
elif test -f "$WIN_SDK_BASE/Bin/SetEnv.Cmd"; then
AC_MSG_NOTICE([Found Windows SDK installation at $WIN_SDK_BASE using $METHOD])
VS_ENV_CMD="$WIN_SDK_BASE/Bin/SetEnv.Cmd"
if test "x$OPENJDK_TARGET_CPU_BITS" = x32; then
VS_ENV_ARGS="/x86"
else
VS_ENV_ARGS="/x64"
fi
# PLATFORM_TOOLSET is used during the compilation of the freetype sources (see
# 'LIB_BUILD_FREETYPE' in libraries.m4) and must be 'Windows7.1SDK' for Windows7.1SDK
# TODO: improve detection for other versions of SDK
eval PLATFORM_TOOLSET="\${VS_SDK_PLATFORM_NAME_${VS_VERSION}}"
else
AC_MSG_NOTICE([Found Windows SDK installation at $WIN_SDK_BASE using $METHOD])
AC_MSG_NOTICE([Warning: Installation is broken, SetEnv.Cmd is missing. Ignoring])
fi
fi
fi
])
################################################################################
# Finds the bat or cmd file in Visual Studio or the SDK that sets up a proper
# build environment and assigns it to VS_ENV_CMD
AC_DEFUN([TOOLCHAIN_FIND_VISUAL_STUDIO_BAT_FILE],
[
# VS2017 provides the option to install previous minor versions of the MSVC
# toolsets. Earlier minor versions of VS2017 cannot be downloaded directly, but
# it is possible to build with a previous minor compiler toolset by passing the
# -vcvars_ver=<toolset_version> argument to vcvarsall.bat.
AC_ARG_WITH(msvc-toolset-version, [AS_HELP_STRING([--with-msvc-toolset-version],
[specific MSVC toolset version to use, passed as the -vcvars_ver argument to
vcvarsall.bat (Windows only)])])
VS_VERSION="$1"
eval VS_COMNTOOLS_VAR="\${VS_ENVVAR_${VS_VERSION}}"
eval VS_COMNTOOLS="\$${VS_COMNTOOLS_VAR}"
eval VS_INSTALL_DIR="\${VS_VS_INSTALLDIR_${VS_VERSION}}"
eval VS_EDITIONS="\${VS_EDITIONS_${VS_VERSION}}"
eval SDK_INSTALL_DIR="\${VS_SDK_INSTALLDIR_${VS_VERSION}}"
eval VS_ENV_ARGS="\${VS_ENV_ARGS_${VS_VERSION}}"
eval VS_TOOLSET_SUPPORTED="\${VS_TOOLSET_SUPPORTED_${VS_VERSION}}"
VS_ENV_CMD=""
# When using --with-tools-dir, assume it points to the correct and default
# version of Visual Studio or that --with-toolchain-version was also set.
if test "x$with_tools_dir" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([${VS_VERSION}],
[$with_tools_dir/../..], [--with-tools-dir])
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([${VS_VERSION}],
[$with_tools_dir/../../..], [--with-tools-dir])
if test "x$VS_ENV_CMD" = x; then
# Having specified an argument which is incorrect will produce an instant failure;
# we should not go on looking
AC_MSG_NOTICE([The path given by --with-tools-dir does not contain a valid])
AC_MSG_NOTICE([Visual Studio installation. Please point to the VC/bin or VC/bin/amd64])
AC_MSG_NOTICE([directory within the Visual Studio installation])
AC_MSG_ERROR([Cannot locate a valid Visual Studio installation])
fi
fi
if test "x$VS_COMNTOOLS" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([${VS_VERSION}],
[$VS_COMNTOOLS/../..], [$VS_COMNTOOLS_VAR variable])
fi
if test "x$PROGRAMFILES" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([${VS_VERSION}],
[$PROGRAMFILES/$VS_INSTALL_DIR], [well-known name])
fi
# Work around the insanely named ProgramFiles(x86) env variable
PROGRAMFILES_X86="`env | $SED -n 's/^ProgramFiles(x86)=//p'`"
if test "x$PROGRAMFILES_X86" != x; then
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([${VS_VERSION}],
[$PROGRAMFILES_X86/$VS_INSTALL_DIR], [well-known name])
fi
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([${VS_VERSION}],
[C:/Program Files/$VS_INSTALL_DIR], [well-known name])
TOOLCHAIN_CHECK_POSSIBLE_VISUAL_STUDIO_ROOT([${VS_VERSION}],
[C:/Program Files (x86)/$VS_INSTALL_DIR], [well-known name])
if test "x$SDK_INSTALL_DIR" != x; then
if test "x$ProgramW6432" != x; then
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([${VS_VERSION}],
[$ProgramW6432/$SDK_INSTALL_DIR], [well-known name])
fi
if test "x$PROGRAMW6432" != x; then
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([${VS_VERSION}],
[$PROGRAMW6432/$SDK_INSTALL_DIR], [well-known name])
fi
if test "x$PROGRAMFILES" != x; then
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([${VS_VERSION}],
[$PROGRAMFILES/$SDK_INSTALL_DIR], [well-known name])
fi
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([${VS_VERSION}],
[C:/Program Files/$SDK_INSTALL_DIR], [well-known name])
TOOLCHAIN_CHECK_POSSIBLE_WIN_SDK_ROOT([${VS_VERSION}],
[C:/Program Files (x86)/$SDK_INSTALL_DIR], [well-known name])
fi
if test "x$VS_TOOLSET_SUPPORTED" != x; then
if test "x$with_msvc_toolset_version" != x; then
VS_ENV_ARGS="$VS_ENV_ARGS -vcvars_ver=$with_msvc_toolset_version"
fi
fi
])
################################################################################
AC_DEFUN([TOOLCHAIN_FIND_VISUAL_STUDIO],
[
AC_ARG_WITH(toolchain-version, [AS_HELP_STRING([--with-toolchain-version],
[the version of the toolchain to look for, use '--help' to show possible values @<:@platform dependent@:>@])])
if test "x$with_toolchain_version" = xlist; then
# List all toolchains
AC_MSG_NOTICE([The following toolchain versions are valid on this platform:])
for version in $VALID_VS_VERSIONS; do
eval VS_DESCRIPTION=\${VS_DESCRIPTION_$version}
$PRINTF " %-10s %s\n" $version "$VS_DESCRIPTION"
done
exit 0
elif test "x$DEVKIT_VS_VERSION" != x; then
VS_VERSION=$DEVKIT_VS_VERSION
TOOLCHAIN_VERSION=$VS_VERSION
# If the devkit has a name, use that as description
VS_DESCRIPTION="$DEVKIT_NAME"
if test "x$VS_DESCRIPTION" = x; then
eval VS_DESCRIPTION="\${VS_DESCRIPTION_${VS_VERSION}}"
fi
eval VS_VERSION_INTERNAL="\${VS_VERSION_INTERNAL_${VS_VERSION}}"
eval MSVCR_NAME="\${VS_MSVCR_${VS_VERSION}}"
eval VCRUNTIME_1_NAME="\${VS_VCRUNTIME_1_${VS_VERSION}}"
eval MSVCP_NAME="\${VS_MSVCP_${VS_VERSION}}"
eval USE_UCRT="\${VS_USE_UCRT_${VS_VERSION}}"
eval VS_SUPPORTED="\${VS_SUPPORTED_${VS_VERSION}}"
eval PLATFORM_TOOLSET="\${VS_VS_PLATFORM_NAME_${VS_VERSION}}"
# The TOOLCHAIN_PATH from a devkit is in Unix format. In WSL we also need a
# Windows-style version of the complete VS_PATH, stored as VS_PATH_WINDOWS.
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
# Convert the toolchain path
OLDIFS="$IFS"
IFS=":"
VS_PATH_WINDOWS=""
for i in $TOOLCHAIN_PATH; do
path=$i
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([path])
VS_PATH_WINDOWS="$VS_PATH_WINDOWS;$path"
done
IFS="$OLDIFS"
# Append the current path from Windows env
WINDOWS_PATH="`$CMD /c echo %PATH%`"
VS_PATH_WINDOWS="$VS_PATH_WINDOWS;$WINDOWS_PATH"
else
VS_PATH="$TOOLCHAIN_PATH:$PATH"
fi
# Convert DEVKIT_VS_INCLUDE into windows style VS_INCLUDE so that it
# can still be exported as INCLUDE for compiler invocations without
# SYSROOT_CFLAGS
OLDIFS="$IFS"
IFS=";"
for i in $DEVKIT_VS_INCLUDE; do
ipath=$i
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([ipath])
VS_INCLUDE="$VS_INCLUDE;$ipath"
done
# Convert DEVKIT_VS_LIB into VS_LIB so that it can still be exported
# as LIB for compiler invocations without SYSROOT_LDFLAGS
for i in $DEVKIT_VS_LIB; do
libpath=$i
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([libpath])
VS_LIB="$VS_LIB;$libpath"
done
IFS="$OLDIFS"
AC_MSG_NOTICE([Found devkit $VS_DESCRIPTION])
elif test "x$with_toolchain_version" != x; then
# User override; check that it is valid
if test "x${VALID_VS_VERSIONS/$with_toolchain_version/}" = "x${VALID_VS_VERSIONS}"; then
AC_MSG_NOTICE([Visual Studio version $with_toolchain_version is not valid.])
AC_MSG_NOTICE([Valid Visual Studio versions: $VALID_VS_VERSIONS.])
AC_MSG_ERROR([Cannot continue.])
fi
VS_VERSIONS_PROBE_LIST="$with_toolchain_version"
else
# No flag given, use default
VS_VERSIONS_PROBE_LIST="$VALID_VS_VERSIONS"
fi
for VS_VERSION in $VS_VERSIONS_PROBE_LIST; do
TOOLCHAIN_FIND_VISUAL_STUDIO_BAT_FILE([$VS_VERSION])
if test "x$VS_ENV_CMD" != x; then
TOOLCHAIN_VERSION=$VS_VERSION
eval VS_DESCRIPTION="\${VS_DESCRIPTION_${VS_VERSION}}"
eval VS_VERSION_INTERNAL="\${VS_VERSION_INTERNAL_${VS_VERSION}}"
eval MSVCR_NAME="\${VS_MSVCR_${VS_VERSION}}"
eval VCRUNTIME_1_NAME="\${VS_VCRUNTIME_1_${VS_VERSION}}"
eval MSVCP_NAME="\${VS_MSVCP_${VS_VERSION}}"
eval USE_UCRT="\${VS_USE_UCRT_${VS_VERSION}}"
eval VS_SUPPORTED="\${VS_SUPPORTED_${VS_VERSION}}"
# The rest of the variables are already evaled while probing
AC_MSG_NOTICE([Found $VS_DESCRIPTION])
break
fi
done
TOOLCHAIN_DESCRIPTION="$VS_DESCRIPTION"
if test "x$VS_SUPPORTED" = "xfalse"; then
UNSUPPORTED_TOOLCHAIN_VERSION=yes
fi
])
################################################################################
# Check if the VS env variables were setup prior to running configure.
# If not, then find vcvarsall.bat and run it automatically, and integrate
# the set env variables into the spec file.
AC_DEFUN([TOOLCHAIN_SETUP_VISUAL_STUDIO_ENV],
[
# Store the path to the Cygwin link.exe so it can be excluded when searching for
# the VS linker. This must be done before changing the PATH when looking for VS.
AC_PATH_PROG(CYGWIN_LINK, link.exe)
if test "x$CYGWIN_LINK" != x; then
AC_MSG_CHECKING([if the first found link.exe is actually the Cygwin link tool])
"$CYGWIN_LINK" --version > /dev/null
if test $? -eq 0 ; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no])
# This might be the VS linker. Don't exclude it later on.
CYGWIN_LINK=""
fi
fi
# First-hand choice is to locate and run the vsvars bat file.
TOOLCHAIN_FIND_VISUAL_STUDIO
# If we have a devkit, skip all of the below.
if test "x$DEVKIT_VS_VERSION" = x; then
if test "x$VS_ENV_CMD" != x; then
# We have found a Visual Studio environment on disk, let's extract variables from the vsvars bat file.
UTIL_FIXUP_EXECUTABLE(VS_ENV_CMD)
# Let's extract the variables that are set by vcvarsall.bat/vsvars32.bat/vsvars64.bat
AC_MSG_NOTICE([Trying to extract Visual Studio environment variables])
# We need to create a couple of temporary files.
VS_ENV_TMP_DIR="$CONFIGURESUPPORT_OUTPUTDIR/vs-env"
$MKDIR -p $VS_ENV_TMP_DIR
# Cannot use the VS10 setup script directly (since it only updates the DOS subshell environment).
# Instead create a shell script which will set the relevant variables when run.
WINPATH_VS_ENV_CMD="$VS_ENV_CMD"
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([WINPATH_VS_ENV_CMD])
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
WINPATH_BASH="bash"
else
WINPATH_BASH="$BASH"
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([WINPATH_BASH])
fi
# Generate a DOS batch file which runs $VS_ENV_CMD, and then creates a shell
# script (executable by bash) that will setup the important variables.
EXTRACT_VC_ENV_BAT_FILE="$VS_ENV_TMP_DIR/extract-vs-env.bat"
$ECHO "@echo off" > $EXTRACT_VC_ENV_BAT_FILE
# This will end up something like:
# call C:/progra~2/micros~2.0/vc/bin/amd64/vcvars64.bat
$ECHO "call \"$WINPATH_VS_ENV_CMD\" $VS_ENV_ARGS" >> $EXTRACT_VC_ENV_BAT_FILE
# In some cases, the VS_ENV_CMD will change directory, change back so
# the set-vs-env.sh ends up in the right place.
$ECHO 'cd %~dp0' >> $EXTRACT_VC_ENV_BAT_FILE
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
# These will end up something like:
# echo VS_PATH=\"$PATH\" > set-vs-env.sh
# The trailing space for every variable except PATH is not a typo; it is needed
# because of trailing \ in the Windows paths, and is stripped later.
# This uses a pure CMD extraction. It yields Windows-style paths that must be
# converted after extraction, but keeps the script simpler.
$ECHO 'echo VS_PATH="%PATH%" > set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO 'echo VS_INCLUDE="%INCLUDE% " >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO 'echo VS_LIB="%LIB% " >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO 'echo VCINSTALLDIR="%VCINSTALLDIR% " >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO 'echo VCToolsRedistDir="%VCToolsRedistDir% " >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO 'echo WindowsSdkDir="%WindowsSdkDir% " >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO 'echo WINDOWSSDKDIR="%WINDOWSSDKDIR% " >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
else
# These will end up something like:
# C:/CygWin/bin/bash -c 'echo VS_PATH=\"$PATH\" > localdevenv.sh
# The trailing space for every variable except PATH is not a typo; it is needed
# because of trailing \ in the Windows paths, and is stripped later.
$ECHO "$WINPATH_BASH -c 'echo VS_PATH="'\"$PATH\" > set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO "$WINPATH_BASH -c 'echo VS_INCLUDE="'\"$INCLUDE\;$include \" >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO "$WINPATH_BASH -c 'echo VS_LIB="'\"$LIB\;$lib \" >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO "$WINPATH_BASH -c 'echo VCINSTALLDIR="'\"$VCINSTALLDIR \" >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO "$WINPATH_BASH -c 'echo VCToolsRedistDir="'\"$VCToolsRedistDir \" >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO "$WINPATH_BASH -c 'echo WindowsSdkDir="'\"$WindowsSdkDir \" >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
$ECHO "$WINPATH_BASH -c 'echo WINDOWSSDKDIR="'\"$WINDOWSSDKDIR \" >> set-vs-env.sh' \
>> $EXTRACT_VC_ENV_BAT_FILE
fi
# Now execute the newly created bat file.
# Change directory so we don't need to mess with Windows paths in redirects.
cd $VS_ENV_TMP_DIR
$CMD /c extract-vs-env.bat > extract-vs-env.log 2>&1
cd $CONFIGURE_START_DIR
if test ! -s $VS_ENV_TMP_DIR/set-vs-env.sh; then
AC_MSG_NOTICE([Could not successfully extract the environment variables needed for the VS setup.])
AC_MSG_NOTICE([Try setting --with-tools-dir to the VC/bin directory within the VS installation])
AC_MSG_NOTICE([or run "bash.exe -l" from a VS command prompt and then run configure from there.])
AC_MSG_ERROR([Cannot continue])
fi
# Remove windows line endings
$SED -i -e 's|\r||g' $VS_ENV_TMP_DIR/set-vs-env.sh
# Now set all paths and other env variables. This will allow the rest of
# the configure script to find and run the compiler in the proper way.
AC_MSG_NOTICE([Setting extracted environment variables])
. $VS_ENV_TMP_DIR/set-vs-env.sh
# Now we have VS_PATH, VS_INCLUDE, VS_LIB. For further checking, we
# also define VCINSTALLDIR, WindowsSdkDir and WINDOWSSDKDIR.
# In WSL, the extracted VS_PATH is Windows style. This needs to be
# rewritten as Unix style and the Windows style version is saved
# in VS_PATH_WINDOWS.
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
OLDIFS="$IFS"
IFS=";"
# Convert VS_PATH to unix style
VS_PATH_WINDOWS="$VS_PATH"
VS_PATH=""
for i in $VS_PATH_WINDOWS; do
path=$i
# Only process non-empty elements
if test "x$path" != x; then
IFS="$OLDIFS"
# Check that directory exists before calling fixup_path
testpath=$path
UTIL_REWRITE_AS_UNIX_PATH([testpath])
if test -d "$testpath"; then
UTIL_FIXUP_PATH([path])
UTIL_APPEND_TO_PATH(VS_PATH, $path)
fi
IFS=";"
fi
done
IFS="$OLDIFS"
fi
else
# We did not find a vsvars bat file, let's hope we are run from a VS command prompt.
AC_MSG_NOTICE([Cannot locate a valid Visual Studio installation, checking current environment])
fi
fi
# At this point, we should have correct variables in the environment, or we can't continue.
AC_MSG_CHECKING([for Visual Studio variables])
if test "x$VCINSTALLDIR" != x || test "x$WindowsSDKDir" != x \
|| test "x$WINDOWSSDKDIR" != x || test "x$DEVKIT_NAME" != x; then
if test "x$VS_INCLUDE" = x || test "x$VS_LIB" = x; then
AC_MSG_RESULT([present but broken])
AC_MSG_ERROR([Your VC command prompt seems broken, INCLUDE and/or LIB is missing.])
else
AC_MSG_RESULT([ok])
# Remove any trailing "\" ";" and " " from the variables.
VS_INCLUDE=`$ECHO "$VS_INCLUDE" | $SED -e 's/\\\\*;* *$//'`
VS_LIB=`$ECHO "$VS_LIB" | $SED 's/\\\\*;* *$//'`
VCINSTALLDIR=`$ECHO "$VCINSTALLDIR" | $SED 's/\\\\* *$//'`
VCToolsRedistDir=`$ECHO "$VCToolsRedistDir" | $SED 's/\\\\* *$//'`
WindowsSdkDir=`$ECHO "$WindowsSdkDir" | $SED 's/\\\\* *$//'`
WINDOWSSDKDIR=`$ECHO "$WINDOWSSDKDIR" | $SED 's/\\\\* *$//'`
if test -z "$WINDOWSSDKDIR"; then
WINDOWSSDKDIR="$WindowsSdkDir"
fi
# Remove any paths containing # (typically F#) as that messes up make. This
# is needed if visual studio was installed with F# support.
VS_PATH=`$ECHO "$VS_PATH" | $SED 's/[[^:#]]*#[^:]*://g'`
AC_SUBST(VS_PATH)
AC_SUBST(VS_INCLUDE)
AC_SUBST(VS_LIB)
# Convert VS_INCLUDE into SYSROOT_CFLAGS
OLDIFS="$IFS"
IFS=";"
for i in $VS_INCLUDE; do
ipath=$i
# Only process non-empty elements
if test "x$ipath" != x; then
IFS="$OLDIFS"
# Check that directory exists before calling fixup_path
testpath=$ipath
UTIL_REWRITE_AS_UNIX_PATH([testpath])
if test -d "$testpath"; then
UTIL_FIXUP_PATH([ipath])
SYSROOT_CFLAGS="$SYSROOT_CFLAGS -I$ipath"
fi
IFS=";"
fi
done
# Convert VS_LIB into SYSROOT_LDFLAGS
for i in $VS_LIB; do
libpath=$i
# Only process non-empty elements
if test "x$libpath" != x; then
IFS="$OLDIFS"
# Check that directory exists before calling fixup_path
testpath=$libpath
UTIL_REWRITE_AS_UNIX_PATH([testpath])
if test -d "$testpath"; then
UTIL_FIXUP_PATH([libpath])
SYSROOT_LDFLAGS="$SYSROOT_LDFLAGS -libpath:$libpath"
fi
IFS=";"
fi
done
IFS="$OLDIFS"
AC_SUBST(VS_PATH_WINDOWS)
fi
else
AC_MSG_RESULT([not found])
if test "x$VS_ENV_CMD" = x; then
AC_MSG_NOTICE([Cannot locate a valid Visual Studio or Windows SDK installation on disk,])
AC_MSG_NOTICE([nor is this script run from a Visual Studio command prompt.])
else
AC_MSG_NOTICE([Running the extraction script failed.])
fi
AC_MSG_NOTICE([Try setting --with-tools-dir to the VC/bin directory within the VS installation])
AC_MSG_NOTICE([or run "bash.exe -l" from a VS command prompt and then run configure from there.])
AC_MSG_ERROR([Cannot continue])
fi
])
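# For orientation, the generated set-vs-env.sh ends up looking roughly like this
# (values are invented; the real ones are long Windows path lists):
#   VS_PATH="/cygdrive/c/progra~2/msvs/vc/bin/amd64:..."
#   VS_INCLUDE="C:\msvs\VC\include;C:\kits\10\include\ucrt "
#   VCINSTALLDIR="C:\msvs\VC\ "
# The trailing backslash-plus-space artifacts are what the stripping below removes.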
AC_DEFUN([TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL],
[
DLL_NAME="$1"
POSSIBLE_MSVC_DLL="$2"
METHOD="$3"
if test -n "$POSSIBLE_MSVC_DLL" -a -e "$POSSIBLE_MSVC_DLL"; then
AC_MSG_NOTICE([Found $DLL_NAME at $POSSIBLE_MSVC_DLL using $METHOD])
# Need to check if the found msvcr is correct architecture
AC_MSG_CHECKING([found $DLL_NAME architecture])
MSVC_DLL_FILETYPE=`$FILE -b "$POSSIBLE_MSVC_DLL"`
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
# The MSYS 'file' command returns "PE32 executable for MS Windows (DLL) (GUI) Intel 80386 32-bit"
# on x32 and "PE32+ executable for MS Windows (DLL) (GUI) Mono/.Net assembly" on x64 systems.
if test "x$OPENJDK_TARGET_CPU_BITS" = x32; then
CORRECT_MSVCR_ARCH="PE32 executable"
else
CORRECT_MSVCR_ARCH="PE32+ executable"
fi
else
if test "x$OPENJDK_TARGET_CPU_BITS" = x32; then
CORRECT_MSVCR_ARCH=386
else
CORRECT_MSVCR_ARCH=x86-64
fi
fi
if $ECHO "$MSVC_DLL_FILETYPE" | $GREP "$CORRECT_MSVCR_ARCH" 2>&1 > /dev/null; then
AC_MSG_RESULT([ok])
MSVC_DLL="$POSSIBLE_MSVC_DLL"
AC_MSG_CHECKING([for $DLL_NAME])
AC_MSG_RESULT([$MSVC_DLL])
else
AC_MSG_RESULT([incorrect, ignoring])
AC_MSG_NOTICE([The file type of the located $DLL_NAME is $MSVC_DLL_FILETYPE])
fi
fi
])
AC_DEFUN([TOOLCHAIN_SETUP_MSVC_DLL],
[
DLL_NAME="$1"
MSVC_DLL=
if test "x$MSVC_DLL" = x; then
if test "x$VCINSTALLDIR" != x; then
CYGWIN_VC_INSTALL_DIR="$VCINSTALLDIR"
UTIL_FIXUP_PATH(CYGWIN_VC_INSTALL_DIR)
if test "$VS_VERSION" -lt 2017; then
# Probe: Using well-known location from Visual Studio 12.0 and older
if test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
POSSIBLE_MSVC_DLL="$CYGWIN_VC_INSTALL_DIR/redist/x64/Microsoft.VC${VS_VERSION_INTERNAL}.CRT/$DLL_NAME"
else
POSSIBLE_MSVC_DLL="$CYGWIN_VC_INSTALL_DIR/redist/x86/Microsoft.VC${VS_VERSION_INTERNAL}.CRT/$DLL_NAME"
fi
else
CYGWIN_VC_TOOLS_REDIST_DIR="$VCToolsRedistDir"
UTIL_FIXUP_PATH(CYGWIN_VC_TOOLS_REDIST_DIR)
# Probe: Using well-known location from VS 2017 and VS 2019
if test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
POSSIBLE_MSVC_DLL="`ls $CYGWIN_VC_TOOLS_REDIST_DIR/x64/Microsoft.VC${VS_VERSION_INTERNAL}.CRT/$DLL_NAME`"
else
POSSIBLE_MSVC_DLL="`ls $CYGWIN_VC_TOOLS_REDIST_DIR/x86/Microsoft.VC${VS_VERSION_INTERNAL}.CRT/$DLL_NAME`"
fi
fi
# In case any of the above finds more than one file, loop over them.
for possible_msvc_dll in $POSSIBLE_MSVC_DLL; do
$ECHO "POSSIBLE_MSVC_DLL $possible_msvc_dll"
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$possible_msvc_dll],
[well-known location in VCINSTALLDIR])
done
fi
fi
if test "x$MSVC_DLL" = x; then
# Probe: Check in the Boot JDK directory.
POSSIBLE_MSVC_DLL="$BOOT_JDK/bin/$DLL_NAME"
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[well-known location in Boot JDK])
fi
if test "x$MSVC_DLL" = x; then
# Probe: Look in the Windows system32 directory
CYGWIN_SYSTEMROOT="$SYSTEMROOT"
UTIL_REWRITE_AS_UNIX_PATH(CYGWIN_SYSTEMROOT)
POSSIBLE_MSVC_DLL="$CYGWIN_SYSTEMROOT/system32/$DLL_NAME"
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[well-known location in SYSTEMROOT])
fi
if test "x$MSVC_DLL" = x; then
# Probe: If Visual Studio Express is installed, there is usually one with the debugger
if test "x$VS100COMNTOOLS" != x; then
CYGWIN_VS_TOOLS_DIR="$VS100COMNTOOLS/.."
UTIL_REWRITE_AS_UNIX_PATH(CYGWIN_VS_TOOLS_DIR)
if test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
POSSIBLE_MSVC_DLL=`$FIND "$CYGWIN_VS_TOOLS_DIR" -name $DLL_NAME \
| $GREP -i /x64/ | $HEAD --lines 1`
else
POSSIBLE_MSVC_DLL=`$FIND "$CYGWIN_VS_TOOLS_DIR" -name $DLL_NAME \
| $GREP -i /x86/ | $HEAD --lines 1`
fi
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[search of VS100COMNTOOLS])
fi
fi
if test "x$MSVC_DLL" = x; then
# Probe: Search wildly in the VCINSTALLDIR. We've probably lost by now.
# (This was the original behaviour; kept since it might turn something up)
if test "x$CYGWIN_VC_INSTALL_DIR" != x; then
if test "x$OPENJDK_TARGET_CPU_BITS" = x64; then
POSSIBLE_MSVC_DLL=`$FIND "$CYGWIN_VC_INSTALL_DIR" -name $DLL_NAME \
| $GREP x64 | $HEAD --lines 1`
else
POSSIBLE_MSVC_DLL=`$FIND "$CYGWIN_VC_INSTALL_DIR" -name $DLL_NAME \
| $GREP x86 | $GREP -v ia64 | $GREP -v x64 | $HEAD --lines 1`
if test "x$POSSIBLE_MSVC_DLL" = x; then
# We're grasping at straws now...
POSSIBLE_MSVC_DLL=`$FIND "$CYGWIN_VC_INSTALL_DIR" -name $DLL_NAME \
| $HEAD --lines 1`
fi
fi
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL([$DLL_NAME], [$POSSIBLE_MSVC_DLL],
[search of VCINSTALLDIR])
fi
fi
if test "x$MSVC_DLL" = x; then
AC_MSG_CHECKING([for $DLL_NAME])
AC_MSG_RESULT([no])
AC_MSG_ERROR([Could not find $DLL_NAME. Please specify using --with-msvcr-dll.])
fi
])
AC_DEFUN([TOOLCHAIN_SETUP_VS_RUNTIME_DLLS],
[
AC_ARG_WITH(msvcr-dll, [AS_HELP_STRING([--with-msvcr-dll],
[path to microsoft C runtime dll (msvcr*.dll) (Windows only) @<:@probed@:>@])])
if test "x$with_msvcr_dll" != x; then
# If given explicitly by user, do not probe. If not present, fail directly.
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCR_NAME, [$with_msvcr_dll], [--with-msvcr-dll])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCR_NAME as specified by --with-msvcr-dll])
fi
MSVCR_DLL="$MSVC_DLL"
elif test "x$DEVKIT_MSVCR_DLL" != x; then
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCR_NAME, [$DEVKIT_MSVCR_DLL], [devkit])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCR_NAME as specified by devkit])
fi
MSVCR_DLL="$MSVC_DLL"
else
TOOLCHAIN_SETUP_MSVC_DLL([${MSVCR_NAME}])
MSVCR_DLL="$MSVC_DLL"
fi
AC_SUBST(MSVCR_DLL)
AC_ARG_WITH(msvcp-dll, [AS_HELP_STRING([--with-msvcp-dll],
[path to microsoft C++ runtime dll (msvcp*.dll) (Windows only) @<:@probed@:>@])])
if test "x$MSVCP_NAME" != "x"; then
if test "x$with_msvcp_dll" != x; then
# If given explicitly by user, do not probe. If not present, fail directly.
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCP_NAME, [$with_msvcp_dll], [--with-msvcp-dll])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCP_NAME as specified by --with-msvcp-dll])
fi
MSVCP_DLL="$MSVC_DLL"
elif test "x$DEVKIT_MSVCP_DLL" != x; then
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($MSVCP_NAME, [$DEVKIT_MSVCP_DLL], [devkit])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $MSVCP_NAME as specified by devkit])
fi
MSVCP_DLL="$MSVC_DLL"
else
TOOLCHAIN_SETUP_MSVC_DLL([${MSVCP_NAME}])
MSVCP_DLL="$MSVC_DLL"
fi
AC_SUBST(MSVCP_DLL)
fi
AC_ARG_WITH(vcruntime-1-dll, [AS_HELP_STRING([--with-vcruntime-1-dll],
[path to microsoft C++ runtime dll (vcruntime*_1.dll) (Windows only) @<:@probed@:>@])])
if test "x$VCRUNTIME_1_NAME" != "x"; then
if test "x$with_vcruntime_1_dll" != x; then
# If given explicitly by user, do not probe. If not present, fail directly.
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($VCRUNTIME_1_NAME, [$with_vcruntime_1_dll],
[--with-vcruntime-1-dll])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $VCRUNTIME_1_NAME as specified by --with-vcruntime-1-dll])
fi
VCRUNTIME_1_DLL="$MSVC_DLL"
elif test "x$DEVKIT_VCRUNTIME_1_DLL" != x; then
TOOLCHAIN_CHECK_POSSIBLE_MSVC_DLL($VCRUNTIME_1_NAME, [$DEVKIT_VCRUNTIME_1_DLL], [devkit])
if test "x$MSVC_DLL" = x; then
AC_MSG_ERROR([Could not find a proper $VCRUNTIME_1_NAME as specified by devkit])
fi
VCRUNTIME_1_DLL="$MSVC_DLL"
else
TOOLCHAIN_SETUP_MSVC_DLL([${VCRUNTIME_1_NAME}])
VCRUNTIME_1_DLL="$MSVC_DLL"
fi
AC_SUBST(VCRUNTIME_1_DLL)
fi
AC_ARG_WITH(ucrt-dll-dir, [AS_HELP_STRING([--with-ucrt-dll-dir],
[path to Microsoft Windows Kit UCRT DLL dir (Windows only) @<:@probed@:>@])])
if test "x$USE_UCRT" = "xtrue"; then
AC_MSG_CHECKING([for UCRT DLL dir])
if test "x$with_ucrt_dll_dir" != x; then
if test -z "$(ls -d "$with_ucrt_dll_dir/"*.dll 2> /dev/null)"; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([Could not find any dlls in $with_ucrt_dll_dir])
else
AC_MSG_RESULT([$with_ucrt_dll_dir])
UCRT_DLL_DIR="$with_ucrt_dll_dir"
UTIL_FIXUP_PATH([UCRT_DLL_DIR])
fi
elif test "x$DEVKIT_UCRT_DLL_DIR" != "x"; then
UCRT_DLL_DIR="$DEVKIT_UCRT_DLL_DIR"
AC_MSG_RESULT($UCRT_DLL_DIR)
else
CYGWIN_WINDOWSSDKDIR="${WINDOWSSDKDIR}"
UTIL_FIXUP_PATH([CYGWIN_WINDOWSSDKDIR])
dll_subdir=$OPENJDK_TARGET_CPU
if test "x$dll_subdir" = "xx86_64"; then
dll_subdir="x64"
fi
UCRT_DLL_DIR="$CYGWIN_WINDOWSSDKDIR/Redist/ucrt/DLLs/$dll_subdir"
if test -z "$(ls -d "$UCRT_DLL_DIR/"*.dll 2> /dev/null)"; then
# Try with version subdir
UCRT_DLL_DIR="`ls -d $CYGWIN_WINDOWSSDKDIR/Redist/*/ucrt/DLLs/$dll_subdir \
2> /dev/null | $SORT -d | $HEAD -n1`"
if test -z "$UCRT_DLL_DIR" \
|| test -z "$(ls -d "$UCRT_DLL_DIR/"*.dll 2> /dev/null)"; then
AC_MSG_RESULT([no])
AC_MSG_ERROR([Could not find any dlls in $UCRT_DLL_DIR])
else
AC_MSG_RESULT($UCRT_DLL_DIR)
fi
else
AC_MSG_RESULT($UCRT_DLL_DIR)
fi
fi
else
UCRT_DLL_DIR=
fi
AC_SUBST(UCRT_DLL_DIR)
])


@@ -24,6 +24,7 @@
#
m4_include([util_paths.m4])
m4_include([util_windows.m4])
###############################################################################
# Create a function/macro that takes a series of named arguments. The call is
@@ -461,3 +462,151 @@ UTIL_DEFUN_NAMED([UTIL_ARG_ENABLE],
fi
])
###############################################################################
# Test that variable $1 denoting a program is not empty. If empty, exit with an error.
# $1: variable to check
AC_DEFUN([UTIL_CHECK_NONEMPTY],
[
if test "x[$]$1" = x; then
AC_MSG_ERROR([Could not find required tool for $1])
fi
])
###############################################################################
# Setup a tool for the given variable. If correctly specified by the user,
# use that value, otherwise search for the tool using the supplied code snippet.
# $1: variable to set
# $2: code snippet to call to look for the tool
# $3: code snippet to call if variable was used to find tool
AC_DEFUN([UTIL_SETUP_TOOL],
[
# Publish this variable in the help.
AC_ARG_VAR($1, [Override default value for $1])
if [[ -z "${$1+x}" ]]; then
# The variable is not set by user, try to locate tool using the code snippet
$2
else
# The variable is set, but is it from the command line or the environment?
# Try to remove the string !$1! from our list.
try_remove_var=${CONFIGURE_OVERRIDDEN_VARIABLES//!$1!/}
if test "x$try_remove_var" = "x$CONFIGURE_OVERRIDDEN_VARIABLES"; then
# If it failed, the variable was not from the command line. Ignore it,
# but warn the user (except for BASH, which is always set by the calling BASH).
if test "x$1" != xBASH; then
AC_MSG_WARN([Ignoring value of $1 from the environment. Use command line variables instead.])
fi
# Try to locate tool using the code snippet
$2
else
# If it succeeded, then it was overridden by the user. We will use it
# for the tool.
# First remove it from the list of overridden variables, so we can test
# for unknown variables in the end.
CONFIGURE_OVERRIDDEN_VARIABLES="$try_remove_var"
tool_override=[$]$1
AC_MSG_NOTICE([User supplied override $1="$tool_override"])
# Check if we try to supply an empty value
if test "x$tool_override" = x; then
AC_MSG_CHECKING([for $1])
AC_MSG_RESULT([disabled])
else
# Split up override in command part and argument part
tool_and_args=($tool_override)
[ tool_command=${tool_and_args[0]} ]
[ unset 'tool_and_args[0]' ]
[ tool_args=${tool_and_args[@]} ]
# Check if the provided tool contains a complete path.
tool_basename="${tool_command##*/}"
if test "x$tool_basename" = "x$tool_command"; then
# A command without a complete path is provided, search $PATH.
AC_MSG_NOTICE([Will search for user supplied tool "$tool_basename"])
AC_PATH_PROG($1, $tool_basename)
if test "x[$]$1" = x; then
AC_MSG_ERROR([User supplied tool $1="$tool_basename" could not be found])
fi
else
# Otherwise we believe it is a complete path. Use it as it is.
AC_MSG_NOTICE([Will use user supplied tool "$tool_command"])
AC_MSG_CHECKING([for $tool_command])
if test ! -x "$tool_command" && test ! -x "$tool_command.exe"; then
AC_MSG_RESULT([not found])
AC_MSG_ERROR([User supplied tool $1="$tool_command" does not exist or is not executable])
fi
$1="$tool_command"
AC_MSG_RESULT([found])
fi
if test "x$tool_args" != x; then
# If we got arguments, re-append them to the command after the fixup.
$1="[$]$1 $tool_args"
fi
fi
fi
$3
fi
])
###############################################################################
# Call UTIL_SETUP_TOOL with AC_PATH_PROGS to locate the tool
# $1: variable to set
# $2: executable name (or list of names) to look for
# $3: [path]
AC_DEFUN([UTIL_PATH_PROGS],
[
UTIL_SETUP_TOOL($1, [AC_PATH_PROGS($1, $2, , $3)])
])
###############################################################################
# Call UTIL_SETUP_TOOL with AC_CHECK_TOOLS to locate the tool
# $1: variable to set
# $2: executable name (or list of names) to look for
AC_DEFUN([UTIL_CHECK_TOOLS],
[
UTIL_SETUP_TOOL($1, [AC_CHECK_TOOLS($1, $2)])
])
###############################################################################
# Like UTIL_PATH_PROGS but fails if no tool was found.
# $1: variable to set
# $2: executable name (or list of names) to look for
# $3: [path]
AC_DEFUN([UTIL_REQUIRE_PROGS],
[
UTIL_PATH_PROGS($1, $2, , $3)
UTIL_CHECK_NONEMPTY($1)
])
###############################################################################
# Like UTIL_SETUP_TOOL but fails if no tool was found.
# $1: variable to set
# $2: autoconf macro to call to look for the special tool
AC_DEFUN([UTIL_REQUIRE_SPECIAL],
[
UTIL_SETUP_TOOL($1, [$2])
UTIL_CHECK_NONEMPTY($1)
])
###############################################################################
# Like UTIL_REQUIRE_PROGS but also allows for bash built-ins
# $1: variable to set
# $2: executable name (or list of names) to look for
# $3: [path]
AC_DEFUN([UTIL_REQUIRE_BUILTIN_PROGS],
[
UTIL_SETUP_TOOL($1, [AC_PATH_PROGS($1, $2, , $3)])
if test "x[$]$1" = x; then
AC_MSG_NOTICE([Required tool $2 not found in PATH, checking built-in])
if command -v $2 > /dev/null 2>&1; then
AC_MSG_NOTICE([Found $2 as shell built-in. Using it])
$1="$2"
else
AC_MSG_ERROR([Required tool $2 also not found as built-in.])
fi
fi
UTIL_CHECK_NONEMPTY($1)
])
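# Hypothetical usage of the helpers above, as they would typically be called from
# other configure fragments (the tool names here are examples only):
#   UTIL_REQUIRE_PROGS(GREP, grep)
#   UTIL_PATH_PROGS(READELF, [greadelf readelf])
#   UTIL_REQUIRE_BUILTIN_PROGS(ECHO, echo)
# A user can then override a tool on the configure command line, e.g.
#   bash configure GREP=/usr/local/bin/ggrep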


@@ -23,7 +23,6 @@
# questions.
#
###############################################################################
# Appends a string to a path variable, only adding the : when needed.
AC_DEFUN([UTIL_APPEND_TO_PATH],
[
@@ -36,7 +35,6 @@ AC_DEFUN([UTIL_APPEND_TO_PATH],
fi
])
###############################################################################
# Prepends a string to a path variable, only adding the : when needed.
AC_DEFUN([UTIL_PREPEND_TO_PATH],
[
@@ -49,6 +47,34 @@ AC_DEFUN([UTIL_PREPEND_TO_PATH],
fi
])
################################################################################
# This will make a path absolute. Assumes it's already a unix path. Also
# resolves ~ to homedir.
AC_DEFUN([UTIL_ABSOLUTE_PATH],
[
if test "x[$]$1" != x; then
new_path="[$]$1"
# Use eval to expand a potential ~. This technique does not work if there
# are spaces in the path (which is valid at this point on Windows), so only
# try to apply it if there is an actual ~ first in the path.
if [ [[ "$new_path" = "~"* ]] ]; then
eval new_path="$new_path"
if test ! -f "$new_path" && test ! -d "$new_path"; then
AC_MSG_ERROR([The new_path of $1, which resolves as "$new_path", is not found.])
fi
fi
if test -d "$new_path"; then
$1="`cd "$new_path"; $THEPWDCMD -L`"
else
dir="`$DIRNAME "$new_path"`"
base="`$BASENAME "$new_path"`"
$1="`cd "$dir"; $THEPWDCMD -L`/$base"
fi
fi
])
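The same absolutizing steps as a plain bash sketch, assuming the path exists (dirname/basename/pwd stand in for $DIRNAME/$BASENAME/$THEPWDCMD; the input is made up):
  p="~/projects/jdk/README"                       # example input
  case "$p" in "~"*) eval p="$p" ;; esac          # expand a leading ~ only
  if [ -d "$p" ]; then
    p="$(cd "$p" && pwd -L)"
  else
    p="$(cd "$(dirname "$p")" && pwd -L)/$(basename "$p")"
  fi
  echo "$p"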
###############################################################################
# This will make sure the given variable points to a full and proper
# path. This means:
@@ -58,100 +84,31 @@ AC_DEFUN([UTIL_PREPEND_TO_PATH],
# 2) The path will be absolute, and it will be in unix-style (on
# cygwin).
# $1: The name of the variable to fix
# $2: if NOFAIL, errors will be silently ignored
AC_DEFUN([UTIL_FIXUP_PATH],
[
# Only process if variable expands to non-empty
path="[$]$1"
if test "x$path" != x; then
if test "x$OPENJDK_BUILD_OS" = "xwindows"; then
if test "x$2" = "xNOFAIL"; then
quiet_option="-q"
fi
imported_path=`$FIXPATH_BASE $quiet_option import "$path"`
$FIXPATH_BASE verify "$imported_path"
if test $? -ne 0; then
if test "x$2" != "xNOFAIL"; then
AC_MSG_ERROR([The path of $1, which resolves as "$path", could not be imported.])
else
imported_path=""
fi
fi
if test "x$imported_path" != "x$path"; then
$1="$imported_path"
fi
if test "x[$]$1" != x; then
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
UTIL_FIXUP_PATH_CYGWIN($1)
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
UTIL_FIXUP_PATH_MSYS($1)
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
UTIL_FIXUP_PATH_WSL($1)
else
[ if [[ "$path" =~ " " ]]; then ]
if test "x$2" != "xNOFAIL"; then
AC_MSG_NOTICE([The path of $1, which resolves as "$path", is invalid.])
AC_MSG_ERROR([Spaces are not allowed in this path.])
else
path=""
fi
fi
# Use eval to expand a potential ~.
eval new_path="$path"
if test ! -e "$new_path"; then
if test "x$2" != "xNOFAIL"; then
AC_MSG_ERROR([The path of $1, which resolves as "$new_path", is not found.])
else
new_path=""
fi
fi
# Make the path absolute
if test "x$new_path" != x; then
if test -d "$new_path"; then
path="`cd "$new_path"; pwd -L`"
else
dir="`$DIRNAME "$new_path"`"
base="`$BASENAME "$new_path"`"
path="`cd "$dir"; pwd -L`/$base"
fi
else
path=""
# We're on a unix platform. Hooray! :)
path="[$]$1"
has_space=`$ECHO "$path" | $GREP " "`
if test "x$has_space" != x; then
AC_MSG_NOTICE([The path of $1, which resolves as "$path", is invalid.])
AC_MSG_ERROR([Spaces are not allowed in this path.])
fi
UTIL_ABSOLUTE_PATH(path)
$1="$path"
fi
fi
])
###############################################################################
# Check if the given file is a unix-style or windows-style executable, that is,
# whether it expects unix-style or windows-style paths.
# Returns "windows" or "unix" in $RESULT.
AC_DEFUN([UTIL_CHECK_WINENV_EXEC_TYPE],
[
# For cygwin and msys2, if it's linked with the correct helper lib, it
# accepts unix paths
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin" || \
test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys2"; then
linked_libs=`$LDD $1 2>&1`
if test $? -ne 0; then
# Non-binary files (e.g. shell scripts) are unix files
RESULT=unix
else
[ if [[ "$linked_libs" =~ $WINENV_MARKER_DLL ]]; then ]
RESULT=unix
else
RESULT=windows
fi
fi
elif test "x$OPENJDK_BUILD_OS" = "xwindows"; then
# On WSL, we can check if it is a PE file
file_type=`$FILE -b $1 2>&1`
[ if [[ $file_type =~ PE.*Windows ]]; then ]
RESULT=windows
else
RESULT=unix
fi
else
RESULT=unix
fi
])
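A hedged illustration of the WSL branch above: classify an executable by what file(1) reports. The path is a placeholder.
  exe="some_tool.exe"                  # hypothetical binary
  file_type="$(file -b "$exe" 2>&1)"
  case "$file_type" in
    *PE*Windows*) result=windows ;;    # native Windows binary => expects Windows paths
    *)            result=unix ;;
  esac
  echo "$exe is a $result-style executable"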
###############################################################################
# This will make sure the given variable points to an executable
# with a full and proper path. This means:
@@ -164,331 +121,65 @@ AC_DEFUN([UTIL_CHECK_WINENV_EXEC_TYPE],
# If the input variable does not have a directory specification, then
# it needs to be in the PATH.
# $1: The name of the variable to fix
# $2: Where to look for the command (replaces $PATH)
# $3: set to NOFIXPATH to skip prefixing FIXPATH, even if needed on platform
AC_DEFUN([UTIL_FIXUP_EXECUTABLE],
[
input="[$]$1"
# Only process if variable expands to non-empty
if test "x$input" != x; then
# First separate the path from the arguments. This will split at the first
# space.
[ if [[ "$OPENJDK_BUILD_OS" = "windows" && input =~ ^$FIXPATH ]]; then
line="${input#$FIXPATH }"
fixpath_prefix="$FIXPATH "
else
line="$input"
fixpath_prefix=""
fi ]
path="${line%% *}"
arguments="${line#"$path"}"
[ if ! [[ "$path" =~ /|\\ ]]; then ]
# This is a command without path (e.g. "gcc" or "echo")
command_type=`type -t "$path"`
if test "x$command_type" = xbuiltin || test "x$command_type" = xkeyword; then
# Shell builtin or keyword; we're done here
new_path="$path"
if test "x[$]$1" != x; then
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
UTIL_FIXUP_EXECUTABLE_CYGWIN($1)
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
UTIL_FIXUP_EXECUTABLE_MSYS($1)
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
UTIL_FIXUP_EXECUTABLE_WSL($1)
else
# We're on a unix platform. Hooray! :)
# First separate the path from the arguments. This will split at the first
# space.
complete="[$]$1"
path="${complete%% *}"
tmp="$complete EOL"
arguments="${tmp#* }"
# Cannot rely on the command "which" here since it doesn't always work.
is_absolute_path=`$ECHO "$path" | $GREP ^/`
if test -z "$is_absolute_path"; then
# Path to executable is not absolute. Find it.
IFS_save="$IFS"
IFS=:
for p in $PATH; do
if test -f "$p/$path" && test -x "$p/$path"; then
new_path="$p/$path"
break
fi
done
IFS="$IFS_save"
else
# Search in $PATH using bash built-in 'type -p'.
saved_path="$PATH"
if test "x$2" != x; then
PATH="$2"
fi
new_path=`type -p "$path"`
if test "x$new_path" = x && test "x$OPENJDK_BUILD_OS" = "xwindows"; then
# Try again with .exe
new_path="`type -p "$path.exe"`"
fi
PATH="$saved_path"
if test "x$new_path" = x; then
AC_MSG_NOTICE([The command for $1, which resolves as "$input", is not found in the PATH.])
AC_MSG_ERROR([Cannot locate $path])
fi
# This is an absolute path, we can use it without further modifications.
new_path="$path"
fi
else
# This is a path with slashes, don't look at $PATH
if test "x$OPENJDK_BUILD_OS" = "xwindows"; then
# fixpath.sh import will do all heavy lifting for us
new_path=`$FIXPATH_BASE import "$path"`
if test ! -e $new_path; then
# It failed, but maybe spaces were part of the path and not separating
# the command and argument. Retry using that assumption.
new_path=`$FIXPATH_BASE import "$input"`
if test ! -e $new_path; then
AC_MSG_NOTICE([The command for $1, which resolves as "$input", can not be found.])
AC_MSG_ERROR([Cannot locate $input])
fi
# It worked, clear all "arguments"
arguments=""
fi
else # on unix
# Make absolute
$1="$path"
UTIL_FIXUP_PATH($1, NOFAIL)
new_path="[$]$1"
if test ! -e $new_path; then
AC_MSG_NOTICE([The command for $1, which resolves as "$input", is not found])
[ if [[ "$path" =~ " " ]]; then ]
AC_MSG_NOTICE([This might be caused by spaces in the path, which is not allowed.])
fi
AC_MSG_ERROR([Cannot locate $path])
fi
if test ! -x $new_path; then
AC_MSG_NOTICE([The command for $1, which resolves as "$input", is not executable.])
AC_MSG_ERROR([Cannot execute command at $path])
fi
fi # end on unix
fi # end with or without slashes
# Now we have a usable command as new_path, with arguments in arguments
if test "x$OPENJDK_BUILD_OS" = "xwindows"; then
if test "x$fixpath_prefix" = x; then
# Only mess around if fixpath_prefix was not given
UTIL_CHECK_WINENV_EXEC_TYPE("$new_path")
if test "x$RESULT" = xwindows; then
fixpath_prefix="$FIXPATH "
# make sure we have an .exe suffix (but not two)
new_path="${new_path%.exe}.exe"
else
# If we have gotten a .exe suffix, remove it
new_path="${new_path%.exe}"
if test "x$new_path" = x; then
AC_MSG_NOTICE([The path of $1, which resolves as "$complete", is not found.])
has_space=`$ECHO "$complete" | $GREP " "`
if test "x$has_space" != x; then
AC_MSG_NOTICE([This might be caused by spaces in the path, which is not allowed.])
fi
AC_MSG_ERROR([Cannot locate the path of $1])
fi
fi
if test "x$3" = xNOFIXPATH; then
fixpath_prefix=""
fi
# Now join together the path and the arguments once again
new_complete="$fixpath_prefix$new_path$arguments"
$1="$new_complete"
fi
])
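The command/argument split and PATH lookup above, reduced to plain bash (the input value is illustrative):
  input="gcc -O2 -pipe"
  path="${input%% *}"                       # command part, up to the first space
  arguments="${input#"$path"}"              # the rest, including its leading space
  new_path="$(type -p "$path")"             # search PATH for a regular file
  if [ -z "$new_path" ]; then
    new_path="$(type -p "$path.exe")"       # Windows retry with .exe appended
  fi
  if [ -z "$new_path" ]; then
    echo "Cannot locate $path" >&2
    exit 1
  fi
  echo "resolved: $new_path$arguments"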
###############################################################################
# Setup a tool for the given variable. If correctly specified by the user,
# use that value, otherwise search for the tool using the supplied code snippet.
# $1: variable to set
# $2: code snippet to call to look for the tool
# $3: code snippet to call if variable was used to find tool
AC_DEFUN([UTIL_SETUP_TOOL],
[
# Publish this variable in the help.
AC_ARG_VAR($1, [Override default value for $1])
if [[ -z "${$1+x}" ]]; then
# The variable is not set by user, try to locate tool using the code snippet
$2
else
# The variable is set, but is it from the command line or the environment?
# Try to remove the string !$1! from our list.
try_remove_var=${CONFIGURE_OVERRIDDEN_VARIABLES//!$1!/}
if test "x$try_remove_var" = "x$CONFIGURE_OVERRIDDEN_VARIABLES"; then
# If it failed, the variable was not from the command line. Ignore it,
# but warn the user (except for BASH, which is always set by the calling BASH).
if test "x$1" != xBASH; then
AC_MSG_WARN([Ignoring value of $1 from the environment. Use command line variables instead.])
fi
# Try to locate tool using the code snippet
$2
if test "x$arguments" != xEOL; then
new_complete="$new_path ${arguments% *}"
else
# If it succeeded, then it was overridden by the user. We will use it
# for the tool.
# First remove it from the list of overridden variables, so we can test
# for unknown variables in the end.
CONFIGURE_OVERRIDDEN_VARIABLES="$try_remove_var"
tool_override=[$]$1
# Check if we try to supply an empty value
if test "x$tool_override" = x; then
AC_MSG_CHECKING([for $1])
AC_MSG_RESULT([[[disabled by user]]])
else
# Split up override in command part and argument part
tool_and_args=($tool_override)
[ tool_command=${tool_and_args[0]} ]
[ unset 'tool_and_args[0]' ]
[ tool_args=${tool_and_args[@]} ]
# Check if the provided tool contains a complete path.
tool_basename="${tool_command##*/}"
if test "x$tool_basename" = "x$tool_command"; then
# A command without a complete path is provided, search $PATH.
AC_MSG_NOTICE([Will search for user supplied tool "$tool_basename"])
AC_PATH_PROGS($1, $tool_basename ${tool_basename}.exe)
tool_command="[$]$1"
if test "x$tool_command" = x; then
AC_MSG_ERROR([User supplied tool $1="$tool_basename" could not be found in PATH])
fi
else
# Otherwise we believe it is a complete path. Use it as it is.
if test ! -x "$tool_command" && test ! -x "${tool_command}.exe"; then
AC_MSG_ERROR([User supplied tool $1="$tool_command" does not exist or is not executable])
fi
if test ! -x "$tool_command"; then
tool_command="${tool_command}.exe"
fi
$1="$tool_command"
fi
if test "x$tool_args" != x; then
# If we got arguments, re-append them to the command after the fixup.
$1="[$]$1 $tool_args"
fi
AC_MSG_CHECKING([for $1])
AC_MSG_RESULT([[$]$1 [[user supplied]]])
fi
fi
$3
fi
])
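How a user override such as CC="/opt/gcc/bin/gcc -m64" gets split by the array code above, shown as plain bash (the value is hypothetical):
  tool_override="/opt/gcc/bin/gcc -m64"
  tool_and_args=($tool_override)            # word-split into an array
  tool_command=${tool_and_args[0]}          # /opt/gcc/bin/gcc
  unset 'tool_and_args[0]'
  tool_args="${tool_and_args[@]}"           # -m64
  tool_basename="${tool_command##*/}"       # gcc; differs from tool_command,
                                            # so a complete path was supplied
  echo "command=$tool_command args=$tool_args"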
###############################################################################
# Locate a tool using proper methods.
# $1: variable to set
# $2: executable name (or list of names) to look for
# $3: [path]
# $4: set to NOFIXPATH to skip prefixing FIXPATH, even if needed on platform
AC_DEFUN([UTIL_LOOKUP_PROGS],
[
UTIL_SETUP_TOOL($1, [
$1=""
if test "x$3" != x; then
old_path="$PATH"
PATH="$3"
new_complete="$new_path"
fi
for name in $2; do
AC_MSG_CHECKING(for $name)
command_type=`type -t "$name"`
if test "x$command_type" = xbuiltin || test "x$command_type" = xkeyword; then
# Shell builtin or keyword; we're done here
full_path="$name"
$1="$full_path"
AC_MSG_RESULT([[$full_path [builtin]]])
break
else
# Search in $PATH
old_ifs="$IFS"
IFS=":"
for elem in $PATH; do
IFS="$old_ifs"
if test "x$elem" = x; then
continue
fi
full_path="$elem/$name"
if test ! -e "$full_path" && test "x$OPENJDK_BUILD_OS" = "xwindows"; then
# Try again with .exe
full_path="$elem/$name.exe"
fi
if test -x "$full_path" && test ! -d "$full_path" ; then
$1="$full_path"
UTIL_FIXUP_EXECUTABLE($1, $3, $4)
result="[$]$1"
# If we have FIXPATH enabled, strip all instances of it and prepend
# a single one, to avoid double fixpath prefixing.
if test "x$4" != xNOFIXPATH; then
[ if [[ $FIXPATH != "" && $result =~ ^"$FIXPATH " ]]; then ]
result="\$FIXPATH ${result#"$FIXPATH "}"
fi
fi
AC_MSG_RESULT([$result])
break 2;
fi
done
IFS="$old_ifs"
fi
AC_MSG_RESULT([[[not found]]])
done
if test "x$3" != x; then
PATH="$old_path"
if test "x$complete" != "x$new_complete"; then
$1="$new_complete"
AC_MSG_NOTICE([Rewriting $1 to "$new_complete"])
fi
])
])
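The PATH walk in UTIL_LOOKUP_PROGS, as a standalone bash loop (the candidate names are examples):
  for name in gmake make; do
    old_ifs="$IFS"; IFS=":"
    for elem in $PATH; do
      IFS="$old_ifs"
      [ -n "$elem" ] || continue
      full_path="$elem/$name"
      if [ ! -e "$full_path" ]; then
        full_path="$elem/$name.exe"          # Windows retry
      fi
      if [ -x "$full_path" ] && [ ! -d "$full_path" ]; then
        echo "found $name at $full_path"
        break 2
      fi
    done
    IFS="$old_ifs"
  done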
###############################################################################
# Like UTIL_LOOKUP_PROGS, but prepends the toolchain prefix so that
# cross-compilation tools are found first.
# $1: variable to set
# $2: executable name (or list of names) to look for
# $3: [path]
AC_DEFUN([UTIL_LOOKUP_TOOLCHAIN_PROGS],
[
if test "x$ac_tool_prefix" = x; then
UTIL_LOOKUP_PROGS($1, $2, $3)
else
prefixed_names=$(for name in $2; do echo ${ac_tool_prefix}${name} $name; done)
UTIL_LOOKUP_PROGS($1, $prefixed_names, $3)
fi
])
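The prefixed-name expansion above, shown with a hypothetical cross-compilation prefix:
  ac_tool_prefix="aarch64-linux-gnu-"
  prefixed_names=$(for name in gcc cc; do echo ${ac_tool_prefix}${name} $name; done)
  echo $prefixed_names
  # aarch64-linux-gnu-gcc gcc aarch64-linux-gnu-cc cc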
###############################################################################
# Test that variable $1 denoting a program is not empty. If empty, exit with an error.
# $1: variable to check
AC_DEFUN([UTIL_CHECK_NONEMPTY],
[
if test "x[$]$1" = x; then
AC_MSG_ERROR([Could not find required tool for $1])
fi
])
###############################################################################
# Like UTIL_LOOKUP_PROGS but fails if no tool was found.
# $1: variable to set
# $2: executable name (or list of names) to look for
# $3: [path]
AC_DEFUN([UTIL_REQUIRE_PROGS],
[
UTIL_LOOKUP_PROGS($1, $2, $3)
UTIL_CHECK_NONEMPTY($1)
])
###############################################################################
# Like UTIL_LOOKUP_TOOLCHAIN_PROGS but fails if no tool was found.
# $1: variable to set
# $2: executable name (or list of names) to look for
# $3: [path]
AC_DEFUN([UTIL_REQUIRE_TOOLCHAIN_PROGS],
[
UTIL_LOOKUP_TOOLCHAIN_PROGS($1, $2, $3)
UTIL_CHECK_NONEMPTY($1)
])
###############################################################################
# Like UTIL_SETUP_TOOL but fails if no tool was found.
# $1: variable to set
# $2: autoconf macro to call to look for the special tool
AC_DEFUN([UTIL_REQUIRE_SPECIAL],
[
UTIL_SETUP_TOOL($1, [$2])
UTIL_CHECK_NONEMPTY($1)
# The special macro will return an absolute path, and is only used for
# unix tools. No further processing needed.
])
###############################################################################
# Add FIXPATH prefix to variable. Normally this is done by UTIL_LOOKUP_PROGS
# or UTIL_FIXUP_EXECUTABLE, but in some circumstances this has to be done
# explicitly, such as when the command in question does not exist yet.
#
# $1: variable to add fixpath to
AC_DEFUN([UTIL_ADD_FIXPATH],
[
if test "x$FIXPATH" != x; then
$1="$FIXPATH [$]$1"
fi
])
@@ -517,8 +208,8 @@ AC_DEFUN([UTIL_REMOVE_SYMBOLIC_LINKS],
sym_link_file=`$BASENAME [$]$1`
cd $sym_link_dir
# Use -P flag to resolve symlinks in directories.
cd `pwd -P`
sym_link_dir=`pwd -P`
cd `$THEPWDCMD -P`
sym_link_dir=`$THEPWDCMD -P`
# Resolve file symlinks
while test $COUNTER -lt 20; do
ISLINK=`$LS -l $sym_link_dir/$sym_link_file | $GREP '\->' | $SED -e 's/.*-> \(.*\)/\1/'`
@@ -529,7 +220,7 @@ AC_DEFUN([UTIL_REMOVE_SYMBOLIC_LINKS],
# Again resolve directory symlinks since the target of the just found
# link could be in a different directory
cd `$DIRNAME $ISLINK`
sym_link_dir=`pwd -P`
sym_link_dir=`$THEPWDCMD -P`
sym_link_file=`$BASENAME $ISLINK`
let COUNTER=COUNTER+1
done
@@ -538,3 +229,4 @@ AC_DEFUN([UTIL_REMOVE_SYMBOLIC_LINKS],
fi
fi
])

View File

@@ -0,0 +1,451 @@
#
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
AC_DEFUN([UTIL_REWRITE_AS_UNIX_PATH],
[
windows_path="[$]$1"
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
unix_path=`$CYGPATH -u "$windows_path"`
$1="$unix_path"
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
unix_path=`$ECHO "$windows_path" | $SED -e 's,^\\(.\\):,/\\1,g' -e 's,\\\\,/,g'`
$1="$unix_path"
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
# wslpath does not check the input, only call if an actual windows path was
# given.
if $ECHO "$windows_path" | $GREP -q ["^[a-zA-Z]:[\\\\/]"]; then
unix_path=`$WSLPATH -u "$windows_path"`
$1="$unix_path"
fi
fi
])
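The three conversions above as direct commands, each of which only works in its own environment (C:\work\jdk is a made-up path):
  cygpath -u 'C:\work\jdk'                                     # cygwin: /cygdrive/c/work/jdk
  echo 'C:\work\jdk' | sed -e 's,^\(.\):,/\1,g' -e 's,\\,/,g'  # msys:   /C/work/jdk
  wslpath -u 'C:\work\jdk'                                     # WSL:    /mnt/c/work/jdk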
AC_DEFUN([UTIL_REWRITE_AS_WINDOWS_MIXED_PATH],
[
unix_path="[$]$1"
if test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.cygwin"; then
windows_path=`$CYGPATH -m "$unix_path"`
$1="$windows_path"
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.msys"; then
windows_path=`cmd //c echo $unix_path`
$1="$windows_path"
elif test "x$OPENJDK_BUILD_OS_ENV" = "xwindows.wsl"; then
windows_path=`$WSLPATH -m "$unix_path" 2>/dev/null`
if test $? -ne 0; then
dir=`dirname "$unix_path"`
base=`basename "$unix_path"`
windows_path=`$WSLPATH -m "$dir"`/"$base"
if test $? -ne 0; then
AC_MSG_ERROR([Cannot convert "$unix_path" to Windows path])
fi
fi
$1="$windows_path"
fi
])
# Helper function which possibly converts a path using DOS-style short mode.
# If so, the updated path is stored in $new_path.
# $1: The path to check
AC_DEFUN([UTIL_MAKE_WINDOWS_SPACE_SAFE_CYGWIN],
[
input_path="$1"
# Check if we need to convert this using DOS-style short mode. If the path
# contains just simple characters, use it. Otherwise (spaces, weird characters),
# take no chances and rewrite it.
# Note: m4 eats our [], so we need to use @<:@ and @:>@ instead.
has_forbidden_chars=`$ECHO "$input_path" | $GREP @<:@^-._/a-zA-Z0-9@:>@`
if test "x$has_forbidden_chars" != x; then
# Now convert it to mixed DOS-style, short mode (no spaces, and / instead of \)
shortmode_path=`$CYGPATH -s -m -a "$input_path"`
path_after_shortmode=`$CYGPATH -u "$shortmode_path"`
if test "x$path_after_shortmode" != "x$input_to_shortpath"; then
# Going to short mode and back again did indeed matter. Since short mode is
# case insensitive, let's make it lowercase to improve readability.
shortmode_path=`$ECHO "$shortmode_path" | $TR 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
# Now convert it back to Unix-style (cygpath)
input_path=`$CYGPATH -u "$shortmode_path"`
new_path="$input_path"
fi
fi
test_cygdrive_prefix=`$ECHO $input_path | $GREP ^/cygdrive/`
if test "x$test_cygdrive_prefix" = x; then
# As a simple fix, exclude /usr/bin since it's not a real path.
if test "x`$ECHO $1 | $GREP ^/usr/bin/`" = x; then
# The path is in a Cygwin special directory (e.g. /home). We need this converted to
# a path prefixed by /cygdrive for fixpath to work.
new_path="$CYGWIN_ROOT_PATH$input_path"
fi
fi
])
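What the short-mode rewrite amounts to on cygwin, with a hypothetical space-containing path (the 8.3 short name shown is only an example; actual values vary):
  input_path="/cygdrive/c/Program Files/Microsoft Visual Studio"
  shortmode_path="$(cygpath -s -m -a "$input_path")"   # e.g. C:/PROGRA~1/MICROS~1
  new_path="$(cygpath -u "$shortmode_path")"           # back to unix style, now space-free
  echo "$new_path"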
# Helper function which possibly converts a path using DOS-style short mode.
# If so, the updated path is stored in $new_path.
# $1: The path to check
AC_DEFUN([UTIL_MAKE_WINDOWS_SPACE_SAFE_MSYS],
[
input_path="$1"
# Check if we need to convert this using DOS-style short mode. If the path
# contains just simple characters, use it. Otherwise (spaces, weird characters),
# take no chances and rewrite it.
# Note: m4 eats our [], so we need to use @<:@ and @:>@ instead.
has_forbidden_chars=`$ECHO "$input_path" | $GREP @<:@^-_/:a-zA-Z0-9@:>@`
if test "x$has_forbidden_chars" != x; then
# Now convert it to mixed DOS-style, short mode (no spaces, and / instead of \)
new_path=`cmd /c "for %A in (\"$input_path\") do @echo %~sA"|$TR \\\\\\\\ / | $TR 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
fi
])
# Helper function which possibly converts a path using DOS-style short mode.
# If so, the updated path is stored in $new_path.
# $1: The path to check
AC_DEFUN([UTIL_MAKE_WINDOWS_SPACE_SAFE_WSL],
[
input_path="$1"
# Check if we need to convert this using DOS-style short mode. If the path
# contains just simple characters, use it. Otherwise (spaces, weird characters),
# take no chances and rewrite it.
# Note: m4 eats our [], so we need to use @<:@ and @:>@ instead.
has_forbidden_chars=`$ECHO "$input_path" | $GREP [[^-_/:a-zA-Z0-9\\.]]`
if test "x$has_forbidden_chars" != x; then
# Now convert it to mixed DOS-style, short mode (no spaces, and / instead of \)
TOPDIR_windows="$TOPDIR"
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([TOPDIR_windows])
# First convert to Windows path to make input valid for cmd
UTIL_REWRITE_AS_WINDOWS_MIXED_PATH([input_path])
# Reset PATH since it can contain a mix of WSL/linux paths and Windows paths from VS,
# which, in combination with WSLENV, will make the WSL layer complain
old_path="$PATH"
PATH=
new_path=`$CMD /c $TOPDIR_windows/make/scripts/windowsShortName.bat "$input_path" \
| $SED -e 's|\r||g' \
| $TR \\\\\\\\ / | $TR 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
# Rewrite back to unix style
PATH="$old_path"
UTIL_REWRITE_AS_UNIX_PATH([new_path])
fi
])
# FIXME: The UTIL_FIXUP_*_CYGWIN/MSYS is most likely too convoluted
# and could probably be heavily simplified. However, all changes in this
# area tend to need lot of testing in different scenarios, and in lack of
# proper unit testing, cleaning this up has not been deemed worth the effort
# at the moment.
AC_DEFUN([UTIL_FIXUP_PATH_CYGWIN],
[
# Input might be given as Windows format, start by converting to
# unix format.
path="[$]$1"
new_path=`$CYGPATH -u "$path"`
UTIL_ABSOLUTE_PATH(new_path)
# Cygwin tries to hide some aspects of the Windows file system, such that binaries are
# named .exe but called without that suffix. Therefore, "foo" and "foo.exe" are considered
# the same file, most of the time (as in "test -f"). But not when running cygpath -s, then
# "foo.exe" is OK but "foo" is an error.
#
# This test is therefore slightly more accurate than "test -f" to check for file presence.
# It is also a way to make sure we got the proper file name for the real test later on.
test_shortpath=`$CYGPATH -s -m "$new_path" 2> /dev/null`
if test "x$test_shortpath" = x; then
AC_MSG_NOTICE([The path of $1, which resolves as "$path", is invalid.])
AC_MSG_ERROR([Cannot locate the path of $1])
fi
# Call helper function which possibly converts this using DOS-style short mode.
# If so, the updated path is stored in $new_path.
UTIL_MAKE_WINDOWS_SPACE_SAFE_CYGWIN([$new_path])
if test "x$path" != "x$new_path"; then
$1="$new_path"
AC_MSG_NOTICE([Rewriting $1 to "$new_path"])
fi
])
AC_DEFUN([UTIL_FIXUP_PATH_MSYS],
[
path="[$]$1"
has_colon=`$ECHO $path | $GREP ^.:`
new_path="$path"
if test "x$has_colon" = x; then
# Not in mixed or Windows style, start by that.
new_path=`cmd //c echo $path`
fi
UTIL_ABSOLUTE_PATH(new_path)
UTIL_MAKE_WINDOWS_SPACE_SAFE_MSYS([$new_path])
UTIL_REWRITE_AS_UNIX_PATH(new_path)
if test "x$path" != "x$new_path"; then
$1="$new_path"
AC_MSG_NOTICE([Rewriting $1 to "$new_path"])
fi
# Save the first 10 bytes of this path to the storage, so fixpath can work.
all_fixpath_prefixes=("${all_fixpath_prefixes@<:@@@:>@}" "${new_path:0:10}")
])
AC_DEFUN([UTIL_FIXUP_PATH_WSL],
[
# Input might be given as Windows format, start by converting to
# unix format.
new_path="[$]$1"
UTIL_REWRITE_AS_UNIX_PATH([new_path])
UTIL_ABSOLUTE_PATH(new_path)
# Call helper function which possibly converts this using DOS-style short mode.
# If so, the updated path is stored in $new_path.
UTIL_MAKE_WINDOWS_SPACE_SAFE_WSL([$new_path])
if test "x$path" != "x$new_path"; then
$1="$new_path"
AC_MSG_NOTICE([Rewriting $1 to "$new_path"])
fi
])
AC_DEFUN([UTIL_FIXUP_EXECUTABLE_CYGWIN],
[
# First separate the path from the arguments. This will split at the first
# space.
complete="[$]$1"
path="${complete%% *}"
tmp="$complete EOL"
arguments="${tmp#* }"
# Input might be given as Windows format, start by converting to
# unix format.
new_path=`$CYGPATH -u "$path"`
# Now try to locate executable using which
new_path=`$WHICH "$new_path" 2> /dev/null`
# bat and cmd files are not always considered executable in cygwin causing which
# to not find them
if test "x$new_path" = x \
&& test "x`$ECHO \"$path\" | $GREP -i -e \"\\.bat$\" -e \"\\.cmd$\"`" != x \
&& test "x`$LS \"$path\" 2>/dev/null`" != x; then
new_path=`$CYGPATH -u "$path"`
fi
if test "x$new_path" = x; then
# Oops. Which didn't find the executable.
# The splitting of arguments from the executable at a space might have been incorrect,
# since paths with spaces are more common on Windows. Give it another try with the whole
# argument.
path="$complete"
arguments="EOL"
new_path=`$CYGPATH -u "$path"`
new_path=`$WHICH "$new_path" 2> /dev/null`
# bat and cmd files are not always considered executable in cygwin causing which
# to not find them
if test "x$new_path" = x \
&& test "x`$ECHO \"$path\" | $GREP -i -e \"\\.bat$\" -e \"\\.cmd$\"`" != x \
&& test "x`$LS \"$path\" 2>/dev/null`" != x; then
new_path=`$CYGPATH -u "$path"`
fi
if test "x$new_path" = x; then
# It's still not found. Now this is an unrecoverable error.
AC_MSG_NOTICE([The path of $1, which resolves as "$complete", is not found.])
has_space=`$ECHO "$complete" | $GREP " "`
if test "x$has_space" != x; then
AC_MSG_NOTICE([You might be mixing spaces in the path and extra arguments, which is not allowed.])
fi
AC_MSG_ERROR([Cannot locate the path of $1])
fi
fi
# Cygwin tries to hide some aspects of the Windows file system, such that binaries are
# named .exe but called without that suffix. Therefore, "foo" and "foo.exe" are considered
# the same file, most of the time (as in "test -f"). But not when running cygpath -s, then
# "foo.exe" is OK but "foo" is an error.
#
# This test is therefore slightly more accurate than "test -f" to check for file presence.
# It is also a way to make sure we got the proper file name for the real test later on.
test_shortpath=`$CYGPATH -s -m "$new_path" 2> /dev/null`
if test "x$test_shortpath" = x; then
# Short path failed, file does not exist as specified.
# Try adding .exe or .cmd
if test -f "${new_path}.exe"; then
input_to_shortpath="${new_path}.exe"
elif test -f "${new_path}.cmd"; then
input_to_shortpath="${new_path}.cmd"
else
AC_MSG_NOTICE([The path of $1, which resolves as "$new_path", is invalid.])
AC_MSG_NOTICE([Neither "$new_path" nor "$new_path.exe/cmd" can be found])
AC_MSG_ERROR([Cannot locate the path of $1])
fi
else
input_to_shortpath="$new_path"
fi
# Call helper function which possibly converts this using DOS-style short mode.
# If so, the updated path is stored in $new_path.
new_path="$input_to_shortpath"
UTIL_MAKE_WINDOWS_SPACE_SAFE_CYGWIN([$input_to_shortpath])
# remove trailing .exe if any
new_path="${new_path/%.exe/}"
])
AC_DEFUN([UTIL_FIXUP_EXECUTABLE_MSYS],
[
# First separate the path from the arguments. This will split at the first
# space.
complete="[$]$1"
path="${complete%% *}"
tmp="$complete EOL"
arguments="${tmp#* }"
# Input might be given as Windows format, start by converting to
# unix format.
new_path="$path"
UTIL_REWRITE_AS_UNIX_PATH(new_path)
# Now try to locate executable using which
new_path=`$WHICH "$new_path" 2> /dev/null`
if test "x$new_path" = x; then
# Oops. Which didn't find the executable.
# The splitting of arguments from the executable at a space might have been incorrect,
# since paths with spaces are more common on Windows. Give it another try with the whole
# argument.
path="$complete"
arguments="EOL"
new_path="$path"
UTIL_REWRITE_AS_UNIX_PATH(new_path)
new_path=`$WHICH "$new_path" 2> /dev/null`
# bat and cmd files are not always considered executable in MSYS causing which
# to not find them
if test "x$new_path" = x \
&& test "x`$ECHO \"$path\" | $GREP -i -e \"\\.bat$\" -e \"\\.cmd$\"`" != x \
&& test "x`$LS \"$path\" 2>/dev/null`" != x; then
new_path="$path"
UTIL_REWRITE_AS_UNIX_PATH(new_path)
fi
if test "x$new_path" = x; then
# It's still not found. Now this is an unrecoverable error.
AC_MSG_NOTICE([The path of $1, which resolves as "$complete", is not found.])
has_space=`$ECHO "$complete" | $GREP " "`
if test "x$has_space" != x; then
AC_MSG_NOTICE([You might be mixing spaces in the path and extra arguments, which is not allowed.])
fi
AC_MSG_ERROR([Cannot locate the path of $1])
fi
fi
# Now new_path has a complete unix path to the binary
if test "x`$ECHO $new_path | $GREP ^/bin/`" != x; then
# Keep paths in /bin as-is, but remove trailing .exe if any
new_path="${new_path/%.exe/}"
# Do not save /bin paths to all_fixpath_prefixes!
else
# Not in mixed or Windows style, start by that.
new_path=`cmd //c echo $new_path`
UTIL_MAKE_WINDOWS_SPACE_SAFE_MSYS([$new_path])
# Output is in $new_path
UTIL_REWRITE_AS_UNIX_PATH(new_path)
# remove trailing .exe if any
new_path="${new_path/%.exe/}"
# Save the first 10 bytes of this path to the storage, so fixpath can work.
all_fixpath_prefixes=("${all_fixpath_prefixes@<:@@@:>@}" "${new_path:0:10}")
fi
])
AC_DEFUN([UTIL_FIXUP_EXECUTABLE_WSL],
[
# First separate the path from the arguments. This will split at the first
# space.
complete="[$]$1"
path="${complete%% *}"
tmp="$complete EOL"
arguments="${tmp#* }"
# Input might be given as Windows format, start by converting to
# unix format.
new_path="$path"
UTIL_REWRITE_AS_UNIX_PATH([new_path])
# Now try to locate executable using which
new_path_bak="$new_path"
new_path=`$WHICH "$new_path" 2> /dev/null`
# bat and cmd files are not considered executable in WSL
if test "x$new_path" = x \
&& test "x`$ECHO \"$path\" | $GREP -i -e \"\\.bat$\" -e \"\\.cmd$\"`" != x \
&& test "x`$LS \"$path\" 2>/dev/null`" != x; then
new_path="$new_path_back"
fi
if test "x$new_path" = x; then
# Oops. Which didn't find the executable.
# The splitting of arguments from the executable at a space might have been incorrect,
# since paths with spaces are more common on Windows. Give it another try with the whole
# argument.
path="$complete"
arguments="EOL"
new_path="$path"
UTIL_REWRITE_AS_UNIX_PATH([new_path])
new_path_bak="$new_path"
new_path=`$WHICH "$new_path" 2> /dev/null`
# bat and cmd files are not considered executable in WSL
if test "x$new_path" = x \
&& test "x`$ECHO \"$path\" | $GREP -i -e \"\\.bat$\" -e \"\\.cmd$\"`" != x \
&& test "x`$LS \"$path\" 2>/dev/null`" != x; then
new_path="$new_path_bak"
fi
if test "x$new_path" = x; then
# It's still not found. Now this is an unrecoverable error.
AC_MSG_NOTICE([The path of $1, which resolves as "$complete", is not found.])
has_space=`$ECHO "$complete" | $GREP " "`
if test "x$has_space" != x; then
AC_MSG_NOTICE([You might be mixing spaces in the path and extra arguments, which is not allowed.])
fi
AC_MSG_ERROR([Cannot locate the path of $1])
fi
fi
# In WSL, suffixes must be present for Windows executables
if test ! -f "$new_path"; then
# Try adding .exe or .cmd
if test -f "${new_path}.exe"; then
input_to_shortpath="${new_path}.exe"
elif test -f "${new_path}.cmd"; then
input_to_shortpath="${new_path}.cmd"
else
AC_MSG_NOTICE([The path of $1, which resolves as "$new_path", is invalid.])
AC_MSG_NOTICE([Neither "$new_path" nor "$new_path.exe/cmd" can be found])
AC_MSG_ERROR([Cannot locate the path of $1])
fi
else
input_to_shortpath="$new_path"
fi
# Call helper function which possibly converts this using DOS-style short mode.
# If so, the updated path is stored in $new_path.
new_path="$input_to_shortpath"
UTIL_MAKE_WINDOWS_SPACE_SAFE_WSL([$input_to_shortpath])
])

View File

@@ -0,0 +1,55 @@
#
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# Default version, product, and vendor information to use,
# unless overridden by configure
DEFAULT_VERSION_FEATURE=16
DEFAULT_VERSION_INTERIM=0
DEFAULT_VERSION_UPDATE=0
DEFAULT_VERSION_PATCH=0
DEFAULT_VERSION_EXTRA1=0
DEFAULT_VERSION_EXTRA2=0
DEFAULT_VERSION_EXTRA3=0
DEFAULT_VERSION_DATE=2021-03-16
DEFAULT_VERSION_CLASSFILE_MAJOR=60 # "`$EXPR $DEFAULT_VERSION_FEATURE + 44`"
DEFAULT_VERSION_CLASSFILE_MINOR=0
DEFAULT_ACCEPTABLE_BOOT_VERSIONS="14 15 16"
DEFAULT_JDK_SOURCE_TARGET_VERSION=16
DEFAULT_PROMOTED_VERSION_PRE=ea
LAUNCHER_NAME=openjdk
PRODUCT_NAME=OpenJDK
PRODUCT_SUFFIX="Runtime Environment"
JDK_RC_PLATFORM_NAME=Platform
COMPANY_NAME=N/A
HOTSPOT_VM_DISTRO="OpenJDK"
VENDOR_URL=https://openjdk.java.net/
VENDOR_URL_BUG=https://bugreport.java.com/bugreport/
VENDOR_URL_VM_BUG=https://bugreport.java.com/bugreport/crash.jsp
# Might need better names for these
MACOSX_BUNDLE_NAME_BASE="OpenJDK"
MACOSX_BUNDLE_ID_BASE="net.java.openjdk"
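A quick consistency check of the derived value noted in the comment above: the class-file major version is simply the feature version plus 44.
  DEFAULT_VERSION_FEATURE=16
  expr $DEFAULT_VERSION_FEATURE + 44    # 60, matching DEFAULT_VERSION_CLASSFILE_MAJOR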

View File

@@ -177,12 +177,6 @@ define SetupJavaCompilationBody
$$(error Must specify BIN (in $1))
endif
ifneq ($$($1_MODULE), )
$1_MODULE_SUBDIR := /$$($1_MODULE)
endif
$1_SAFE_NAME := $$(strip $$(subst /,_, $1))
ifeq ($$($1_SMALL_JAVA), )
# If unspecified, default to true
$1_SMALL_JAVA := true
@@ -208,42 +202,24 @@ define SetupJavaCompilationBody
# If unspecified, default to the new jdk we're building
$1_TARGET_RELEASE := $$(TARGET_RELEASE_BOOTJDK)
endif
else ifeq ($$($1_COMPILER), buildjdk)
$1_JAVAC_CMD := $$(BUILD_JAVAC)
ifeq ($$($1_TARGET_RELEASE), )
# If unspecified, default to the new jdk we're building
$1_TARGET_RELEASE := $$(TARGET_RELEASE_NEWJDK)
endif
else ifeq ($$($1_COMPILER), interim)
# Use java server if it is enabled, and the user does not want a specialized
# class path.
ifeq ($$(ENABLE_JAVAC_SERVER)+$$($1_CLASSPATH), true+)
$1_JAVAC := $$(INTERIM_LANGTOOLS_ARGS) -m jdk.compiler.interim/com.sun.tools.sjavac.Main
# Create a configuration file with the needed information for the javac
# server to function properly.
$1_JAVAC_SERVER_CONFIG := $$($1_BIN)$$($1_MODULE_SUBDIR)/_the.$$($1_SAFE_NAME)-server.conf
# How to launch the server. This must use JAVA_DETACH, which is the "big" java
# with an ability to detach from fixpath (on Windows)
# This will be executed by the client, if needed.
$1_JAVAC_SERVER_CMD := $$(JAVA_DETACH) $$($1_JAVA_FLAGS) $$($1_JAVAC)
$1_ESCAPED_CMD := $$(subst $$(SPACE),%20,$$(subst $$(COMMA),%2C,$$(strip $$($1_JAVAC_SERVER_CMD))))
# The portfile contains the tcp/ip on which the server listens
# The port file contains the tcp/ip port on which the server listens
# and the cookie necessary to talk to the server.
$1_JAVAC_PORT_FILE := $$(call FixPath, $$(JAVAC_SERVER_DIR)/server.port)
$1_JAVA_SERVER_FLAGS := --server:portfile=$$(JAVAC_SERVER_DIR)/server.port,sjavac=$$($1_ESCAPED_CMD)
# The servercmd specifies how to launch the server. This will be executed
# by the client, if needed.
$1_JAVAC_SERVER_CMD := $$(call FixPath, $$(JAVA) $$($1_JAVA_FLAGS) $$($1_JAVAC))
$1_CONFIG_VARDEPS := $$($1_JAVAC_PORT_FILE) $$($1_JAVAC_SERVER_CMD)
$1_CONFIG_VARDEPS_FILE := $$(call DependOnVariable, $1_CONFIG_VARDEPS, \
$$($1_BIN)$$($1_MODULE_SUBDIR)/_the.$1.config_vardeps)
$$($1_JAVAC_SERVER_CONFIG): $$($1_CONFIG_VARDEPS_FILE)
$(ECHO) portfile=$$($1_JAVAC_PORT_FILE) > $$@
$(ECHO) servercmd=$$($1_JAVAC_SERVER_CMD) >> $$@
# Always use small java to launch client
$1_JAVAC_CMD := $$(JAVA_SMALL) $$($1_JAVA_FLAGS) $$($1_JAVAC) \
--server:conf=$$($1_JAVAC_SERVER_CONFIG)
# Always use small to launch client
$1_JAVAC_CMD := $$(JAVA_SMALL) $$($1_JAVA_FLAGS) $$($1_JAVAC) $$($1_JAVA_SERVER_FLAGS)
else
# No javac server
$1_JAVAC := $$(INTERIM_LANGTOOLS_ARGS) -m jdk.compiler.interim/com.sun.tools.javac.Main
@@ -269,7 +245,7 @@ define SetupJavaCompilationBody
# Tell javac to do exactly as told and no more
PARANOIA_FLAGS := -implicit:none -Xprefer:source -XDignore.symbol.file=true -encoding ascii
$1_FLAGS += -g -Xlint:all $$($1_TARGET_RELEASE) $$(PARANOIA_FLAGS) $$(JAVA_WARNINGS_ARE_ERRORS)
$1_FLAGS += -g -Xlint:all --doclint-format html5 $$($1_TARGET_RELEASE) $$(PARANOIA_FLAGS) $$(JAVA_WARNINGS_ARE_ERRORS)
$1_FLAGS += $$($1_JAVAC_FLAGS)
ifneq ($$($1_DISABLED_WARNINGS), )
@@ -280,6 +256,10 @@ define SetupJavaCompilationBody
$1_FLAGS += -cp $$(call PathList, $$($1_CLASSPATH))
endif
ifneq ($$($1_MODULE), )
$1_MODULE_SUBDIR := /$$($1_MODULE)
endif
# Make sure the dirs exist, or that one of the EXTRA_FILES, which may not
# exist yet, is in it.
$$(foreach d, $$($1_SRC), \
@@ -324,11 +304,9 @@ define SetupJavaCompilationBody
ifneq ($$($1_KEEP_DUPS), true)
# Remove duplicate source files by keeping the first found of each duplicate.
# This allows for automatic overrides with custom or platform specific versions
# source files. Need to call DoubleDollar as we have java classes with '$' in
# their names.
# source files.
$1_SRCS := $$(strip $$(foreach s, $$($1_SRCS), \
$$(eval relative_src := $$(call remove-prefixes, $$($1_SRC), \
$$(call DoubleDollar, $$(s)))) \
$$(eval relative_src := $$(call remove-prefixes, $$($1_SRC), $$(s))) \
$$(if $$($1_$$(relative_src)), \
, \
$$(eval $1_$$(relative_src) := 1) $$(s))))
@@ -344,6 +322,9 @@ define SetupJavaCompilationBody
$$(error No source files found for $1)
endif
else
$1_SAFE_NAME := $$(strip $$(subst /,_, $1))
# All files below META-INF are always copied.
$1_ALL_COPIES := $$(filter $$(addsuffix /META-INF%,$$($1_SRC)),$$($1_ALL_SRCS))
# Find all files to be copied from source to bin.
@@ -452,7 +433,7 @@ define SetupJavaCompilationBody
# Do the actual compilation
$$($1_COMPILE_TARGET): $$($1_SRCS) $$($1_FILELIST) $$($1_DEPENDS) \
$$($1_VARDEPS_FILE) $$($1_EXTRA_DEPS) $$($1_JAVAC_SERVER_CONFIG)
$$($1_VARDEPS_FILE) $$($1_EXTRA_DEPS)
$$(call MakeDir, $$(@D))
$$(call ExecuteWithLog, $$($1_BIN)$$($1_MODULE_SUBDIR)/_the.$$($1_SAFE_NAME)_batch, \
$$($1_JAVAC_CMD) $$($1_FLAGS) \

View File

@@ -77,10 +77,8 @@ ifeq ($(STATIC_LIBS), true)
FindStaticLib =
endif
# Returns the module specific java header dir if it exists.
# Param 1 - module name
GetJavaHeaderDir = \
$(if $(strip $1),$(wildcard $(SUPPORT_OUTPUTDIR)/headers/$(strip $1)))
$(wildcard $(SUPPORT_OUTPUTDIR)/headers/$(strip $1))
# Process a dir description such as "java.base:headers" into a set of proper absolute paths.
ProcessDir = \
@@ -125,27 +123,15 @@ JDK_RCFLAGS=$(RCFLAGS) \
SetupJdkLibrary = $(NamedParamsMacroTemplate)
define SetupJdkLibraryBody
ifeq ($$($1_OUTPUT_DIR), )
ifneq ($$(MODULE), )
$1_OUTPUT_DIR := $$(call FindLibDirForModule, $$(MODULE))
else
$$(error Must specify OUTPUT_DIR in a MODULE free context)
endif
$1_OUTPUT_DIR := $$(call FindLibDirForModule, $$(MODULE))
endif
ifeq ($$($1_OBJECT_DIR), )
ifneq ($$(MODULE), )
$1_OBJECT_DIR := $$(SUPPORT_OUTPUTDIR)/native/$$(MODULE)/lib$$($1_NAME)
else
$$(error Must specify OBJECT_DIR in a MODULE free context)
endif
$1_OBJECT_DIR := $$(SUPPORT_OUTPUTDIR)/native/$$(MODULE)/lib$$($1_NAME)
endif
ifeq ($$($1_SRC), )
ifneq ($$(MODULE), )
$1_SRC := $$(call FindSrcDirsForLib, $$(MODULE), $$($1_NAME))
else
$$(error Must specify SRC in a MODULE free context)
endif
$1_SRC := $$(call FindSrcDirsForLib, $$(MODULE), $$($1_NAME))
else
$1_SRC := $$(foreach dir, $$($1_SRC), $$(call ProcessDir, $$(dir)))
endif
@@ -179,8 +165,7 @@ define SetupJdkLibraryBody
ifneq ($$($1_HEADERS_FROM_SRC), false)
$1_SRC_HEADER_FLAGS := $$(addprefix -I, $$(wildcard $$($1_SRC)))
endif
# Add the module specific java header dir
# Always add the java header dir
$1_SRC_HEADER_FLAGS += $$(addprefix -I, $$(call GetJavaHeaderDir, $$(MODULE)))
ifneq ($$($1_EXTRA_HEADER_DIRS), )
@@ -218,19 +203,11 @@ define SetupJdkExecutableBody
$1_TYPE := EXECUTABLE
ifeq ($$($1_OUTPUT_DIR), )
ifneq ($$(MODULE), )
$1_OUTPUT_DIR := $$(call FindExecutableDirForModule, $$(MODULE))
else
$$(error Must specify OUTPUT_DIR in a MODULE free context)
endif
$1_OUTPUT_DIR := $$(call FindExecutableDirForModule, $$(MODULE))
endif
ifeq ($$($1_OBJECT_DIR), )
ifneq ($$(MODULE), )
$1_OBJECT_DIR := $$(SUPPORT_OUTPUTDIR)/native/$$(MODULE)/$$($1_NAME)
else
$$(error Must specify OBJECT_DIR in a MODULE free context)
endif
$1_OBJECT_DIR := $$(SUPPORT_OUTPUTDIR)/native/$$(MODULE)/$$($1_NAME)
endif
ifeq ($$($1_VERSIONINFO_RESOURCE), )

View File

@@ -64,9 +64,6 @@ define NEWLINE
endef
# Make sure we have a value (could be overridden on command line by caller)
CREATING_BUILDJDK ?= false
# Certain features only work in newer versions of GNU Make. The build will still
# function in 3.81, but will be less performant.
ifeq (4.0, $(firstword $(sort 4.0 $(MAKE_VERSION))))
@@ -92,7 +89,7 @@ BUILDTIMESDIR=$(OUTPUTDIR)/make-support/build-times
# Record starting time for build of a sub repository.
define RecordStartTime
$(DATE) '+%Y %m %d %H %M %S' | $(AWK) '{ print $$1,$$2,$$3,$$4,$$5,$$6,($$4*3600+$$5*60+$$6) }' > $(BUILDTIMESDIR)/build_time_start_$(strip $1) && \
$(DATE) '+%Y %m %d %H %M %S' | $(NAWK) '{ print $$1,$$2,$$3,$$4,$$5,$$6,($$4*3600+$$5*60+$$6) }' > $(BUILDTIMESDIR)/build_time_start_$(strip $1) && \
$(DATE) '+%Y-%m-%d %H:%M:%S' > $(BUILDTIMESDIR)/build_time_start_$(strip $1)_human_readable
endef
@@ -100,10 +97,10 @@ endef
# easy to read format. Handles builds that cross midnight. Expects
# that a build will never take 24 hours or more.
define RecordEndTime
$(DATE) '+%Y %m %d %H %M %S' | $(AWK) '{ print $$1,$$2,$$3,$$4,$$5,$$6,($$4*3600+$$5*60+$$6) }' > $(BUILDTIMESDIR)/build_time_end_$(strip $1)
$(DATE) '+%Y %m %d %H %M %S' | $(NAWK) '{ print $$1,$$2,$$3,$$4,$$5,$$6,($$4*3600+$$5*60+$$6) }' > $(BUILDTIMESDIR)/build_time_end_$(strip $1)
$(DATE) '+%Y-%m-%d %H:%M:%S' > $(BUILDTIMESDIR)/build_time_end_$(strip $1)_human_readable
$(ECHO) `$(CAT) $(BUILDTIMESDIR)/build_time_start_$(strip $1)` `$(CAT) $(BUILDTIMESDIR)/build_time_end_$(strip $1)` $1 | \
$(AWK) '{ F=$$7; T=$$14; if (F > T) { T+=3600*24 }; D=T-F; H=int(D/3600); \
$(NAWK) '{ F=$$7; T=$$14; if (F > T) { T+=3600*24 }; D=T-F; H=int(D/3600); \
M=int((D-H*3600)/60); S=D-H*3600-M*60; printf("%02d:%02d:%02d %s\n",H,M,S,$$15); }' \
> $(BUILDTIMESDIR)/build_time_diff_$(strip $1)
endef
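The elapsed-time arithmetic above, runnable on its own with made-up timestamps (fields 7 and 14 are seconds since midnight for start and end):
  echo "2020 07 21 06 58 03 25083 2020 07 21 07 00 45 25245 example" | \
    awk '{ F=$7; T=$14; if (F > T) { T+=3600*24 }; D=T-F; H=int(D/3600); \
      M=int((D-H*3600)/60); S=D-H*3600-M*60; printf("%02d:%02d:%02d %s\n",H,M,S,$15); }'
  # prints 00:02:42 example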
@@ -439,18 +436,37 @@ endif
# On Windows, converts a path from cygwin/unix style (e.g. /bin/foo) into
# "mixed mode" (e.g. c:/cygwin/bin/foo). On other platforms, return the path
# unchanged.
# This also converts a colon-separated list of paths to a semicolon-separated
# list.
# This is normally not needed since we use the FIXPATH prefix for command lines,
# but might be needed in certain circumstances.
ifeq ($(call isTargetOs, windows), true)
FixPath = \
$(strip $(subst \,\\, $(shell $(FIXPATH_BASE) print $(patsubst $(FIXPATH), , $1))))
ifeq ($(call isBuildOsEnv, windows.wsl), true)
FixPath = \
$(shell $(WSLPATH) -m $1)
else
FixPath = \
$(shell $(CYGPATH) -m $1)
endif
else
FixPath = \
$1
endif
################################################################################
# FixPathList
#
# On Windows, converts a cygwin/unix style path list (colon-separated) into
# the native format (mixed mode, semicolon-separated). On other platforms,
# return the path list unchanged.
################################################################################
ifeq ($(call isTargetOs, windows), true)
FixPathList = \
$(subst @,$(SPACE),$(subst $(SPACE),;,$(foreach entry,$(subst :,$(SPACE),\
$(subst $(SPACE),@,$(strip $1))),$(call FixPath, $(entry)))))
else
FixPathList = \
$1
endif
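What FixPathList does on Windows, expressed as a plain shell loop (cygwin shown; the input list is made up):
  list="/cygdrive/c/work/jdk:/cygdrive/d/tools"
  converted=""
  old_ifs="$IFS"; IFS=":"
  for entry in $list; do
    converted="${converted:+$converted;}$(cygpath -m "$entry")"
  done
  IFS="$old_ifs"
  echo "$converted"     # C:/work/jdk;D:/tools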
################################################################################
# DependOnVariable
#

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2014, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2014, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -27,48 +27,206 @@ ifndef _MODULES_GMK
_MODULES_GMK := 1
################################################################################
# Setup module sets for classloaders
#
# BOOT_MODULES are modules defined by the boot loader
# PLATFORM_MODULES are modules defined by the platform loader
# JRE_TOOL_MODULES are tools included in JRE and defined by the application loader
#
# All other modules not declared below are defined by the application loader
# and are not included in JRE.
include $(TOPDIR)/make/conf/module-loader-map.conf
BOOT_MODULES :=
PLATFORM_MODULES :=
JRE_TOOL_MODULES :=
UPGRADEABLE_MODULES :=
AGGREGATOR_MODULES :=
DOCS_MODULES :=
# Append platform-specific and upgradeable modules
PLATFORM_MODULES += $(PLATFORM_MODULES_$(OPENJDK_TARGET_OS)) \
$(UPGRADEABLE_PLATFORM_MODULES)
################################################################################
# Setup module sets for docs
include $(TOPDIR)/make/conf/docs-modules.conf
################################################################################
# Setup module sets needed by the build system
include $(TOPDIR)/make/conf/build-module-sets.conf
################################################################################
# Hook to include the corresponding custom file, if present.
# Allowing MODULE list extensions setup above.
$(eval $(call IncludeCustomExtension, common/Modules.gmk))
################################################################################
# Depending on the configuration, we might need to filter out some modules that
# normally should have been included
BOOT_MODULES += \
java.base \
java.datatransfer \
java.desktop \
java.instrument \
java.logging \
java.management \
java.management.rmi \
java.naming \
java.prefs \
java.rmi \
java.security.sasl \
java.xml \
jdk.incubator.foreign \
jdk.internal.vm.ci \
jdk.jfr \
jdk.management \
jdk.management.jfr \
jdk.management.agent \
jdk.net \
jdk.nio.mapmode \
jdk.sctp \
jdk.unsupported \
#
# to be deprivileged
BOOT_MODULES += \
jdk.naming.rmi \
#
# Modules that directly or indirectly require upgradeable modules
# should be considered carefully as to whether they should be upgradeable or not.
UPGRADEABLE_MODULES += \
java.compiler \
jdk.aot \
jdk.internal.vm.compiler \
jdk.internal.vm.compiler.management \
#
AGGREGATOR_MODULES += \
java.se \
#
PLATFORM_MODULES += \
$(UPGRADEABLE_MODULES) \
$(AGGREGATOR_MODULES)
#
PLATFORM_MODULES += \
java.net.http \
java.scripting \
java.security.jgss \
java.smartcardio \
java.sql \
java.sql.rowset \
java.transaction.xa \
java.xml.crypto \
jdk.accessibility \
jdk.charsets \
jdk.crypto.cryptoki \
jdk.crypto.ec \
jdk.dynalink \
jdk.httpserver \
jdk.jsobject \
jdk.localedata \
jdk.naming.dns \
jdk.security.auth \
jdk.security.jgss \
jdk.xml.dom \
jdk.zipfs \
#
ifeq ($(call isTargetOs, windows), true)
PLATFORM_MODULES += jdk.crypto.mscapi
endif
JRE_TOOL_MODULES += \
jdk.jdwp.agent \
jdk.incubator.jpackage \
#
################################################################################
# DOCS_MODULES defines the root modules for javadoc generation.
# All modules that they `require transitive`, directly or indirectly, will be included.
DOCS_MODULES += \
java.se \
java.smartcardio \
jdk.accessibility \
jdk.attach \
jdk.charsets \
jdk.compiler \
jdk.crypto.cryptoki \
jdk.crypto.ec \
jdk.dynalink \
jdk.editpad \
jdk.hotspot.agent \
jdk.httpserver \
jdk.incubator.jpackage \
jdk.jartool \
jdk.javadoc \
jdk.jcmd \
jdk.jconsole \
jdk.jdeps \
jdk.jdi \
jdk.jdwp.agent \
jdk.jfr \
jdk.jlink \
jdk.jsobject \
jdk.jshell \
jdk.jstatd \
jdk.incubator.foreign \
jdk.localedata \
jdk.management \
jdk.management.agent \
jdk.management.jfr \
jdk.naming.dns \
jdk.naming.rmi \
jdk.net \
jdk.nio.mapmode \
jdk.sctp \
jdk.security.auth \
jdk.security.jgss \
jdk.xml.dom \
jdk.zipfs \
#
# These modules are included in the interim image which is used to run profiling
# before building the real images.
INTERIM_IMAGE_MODULES := java.base java.logging
LANGTOOLS_MODULES := \
java.compiler \
jdk.compiler \
jdk.javadoc \
jdk.jdeps \
jdk.jshell \
#
HOTSPOT_MODULES := \
jdk.aot \
jdk.hotspot.agent \
jdk.internal.vm.ci \
jdk.internal.vm.compiler \
jdk.internal.vm.compiler.management \
#
################################################################################
# Some platforms don't have the serviceability agent
ifeq ($(INCLUDE_SA), false)
MODULES_FILTER += jdk.hotspot.agent
endif
################################################################################
# Filter out jvmci specific modules if jvmci is disabled
ifeq ($(INCLUDE_JVMCI), false)
MODULES_FILTER += jdk.internal.vm.ci
endif
################################################################################
# Filter out Graal specific modules if Graal is disabled
ifeq ($(INCLUDE_GRAAL), false)
MODULES_FILTER += jdk.internal.vm.compiler
MODULES_FILTER += jdk.internal.vm.compiler.management
endif
################################################################################
# Filter out aot specific modules if aot is disabled
ifeq ($(ENABLE_AOT), false)
MODULES_FILTER += jdk.aot
endif
################################################################################
# jpackage is only on windows, macosx, and linux
ifeq ($(call isTargetOs, windows macosx linux), false)
MODULES_FILTER += jdk.jpackage
MODULES_FILTER += jdk.incubator.jpackage
endif
################################################################################
@@ -178,7 +336,7 @@ $(MODULE_DEPS_MAKEFILE): $(MODULE_INFOS) \
$(RM) $@
$(foreach m, $(MODULE_INFOS), \
( $(PRINTF) "DEPS_$(call GetModuleNameFromModuleInfo, $m) :=" && \
$(AWK) -v MODULE=$(call GetModuleNameFromModuleInfo, $m) '\
$(NAWK) -v MODULE=$(call GetModuleNameFromModuleInfo, $m) '\
BEGIN { if (MODULE != "java.base") printf(" java.base"); } \
/^ *requires/ { sub(/;/, ""); \
sub(/requires /, " "); \
@@ -188,11 +346,11 @@ $(MODULE_DEPS_MAKEFILE): $(MODULE_INFOS) \
sub(/\/\*.*\*\//, ""); \
gsub(/^ +\*.*/, ""); \
gsub(/ /, ""); \
gsub(/\r/, ""); \
gsub(/\r/, ""); \
printf(" %s", $$0) } \
END { printf("\n") }' $m && \
$(PRINTF) "TRANSITIVE_MODULES_$(call GetModuleNameFromModuleInfo, $m) :=" && \
$(AWK) -v MODULE=$(call GetModuleNameFromModuleInfo, $m) '\
$(NAWK) -v MODULE=$(call GetModuleNameFromModuleInfo, $m) '\
BEGIN { if (MODULE != "java.base") printf(" java.base"); } \
/^ *requires *transitive/ { \
sub(/;/, ""); \
@@ -202,7 +360,7 @@ $(MODULE_DEPS_MAKEFILE): $(MODULE_INFOS) \
sub(/\/\*.*\*\//, ""); \
gsub(/^ +\*.*/, ""); \
gsub(/ /, ""); \
gsub(/\r/, ""); \
gsub(/\r/, ""); \
printf(" %s", $$0) } \
END { printf("\n") }' $m \
) >> $@ $(NEWLINE))
@@ -250,7 +408,8 @@ FindTransitiveIndirectDepsForModules = \
# Upgradeable modules are those that are either defined as upgradeable or that
# require an upradeable module.
FindAllUpgradeableModules = \
$(sort $(filter-out $(MODULES_FILTER), $(UPGRADEABLE_PLATFORM_MODULES)))
$(sort $(filter-out $(MODULES_FILTER), $(UPGRADEABLE_MODULES)))
################################################################################
@@ -306,6 +465,7 @@ endef
# * JDK_MODULES
# * BOOT_MODULES
# * PLATFORM_MODULES
# * JRE_TOOL_MODULES
define ReadImportMetaData
IMPORTED_MODULES := $$(call FindImportedModules)
$$(foreach m, $$(IMPORTED_MODULES), \

View File

@@ -1,5 +1,5 @@
#
# Copyright (c) 2011, 2021, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2011, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -72,10 +72,10 @@ define WriteCompileCommandsFragment
$(call LogInfo, Creating compile commands fragment for $(notdir $3))
$(call MakeDir, $(dir $1))
$(call WriteFile,{ \
"directory": "$(strip $(call FixPath, $2))"$(COMMA) \
"file": "$(strip $(call FixPath, $3))"$(COMMA) \
"directory": "$(strip $2)"$(COMMA) \
"file": "$(strip $3)"$(COMMA) \
"command": "$(strip $(subst $(DQUOTE),\$(DQUOTE),$(subst \,\\,\
$(subst $(FIXPATH),,$(call FixPath, $4)))))" \
$(subst $(FIXPATH),,$4))))" \
}$(COMMA), \
$1)
endef
@@ -163,7 +163,7 @@ $(eval $(call DefineNativeToolchain, TOOLCHAIN_BUILD_LINK_CXX, \
################################################################################
# Extensions of files handled by this macro.
NATIVE_SOURCE_EXTENSIONS := %.S %.c %.cpp %.cc %.m %.mm
NATIVE_SOURCE_EXTENSIONS := %.s %.S %.c %.cpp %.cc %.m %.mm
# Replaces native source extensions with the object file extension in a string.
# Param 1: the string containing source file names with extensions
@@ -175,6 +175,12 @@ $(strip \
)
endef
ifeq ($(call isBuildOsEnv, windows.cygwin), true)
UNIX_PATH_PREFIX := /cygdrive
else ifeq ($(call isBuildOsEnv, windows.msys), true)
UNIX_PATH_PREFIX :=
endif
# This pattern is used to transform the output of the microsoft CL compiler
# into a make syntax dependency file (.d)
WINDOWS_SHOWINCLUDE_SED_PATTERN := \
@@ -182,7 +188,7 @@ WINDOWS_SHOWINCLUDE_SED_PATTERN := \
-e 's|Note: including file: *||' \
-e 's|\r||g' \
-e 's|\\|/|g' \
-e 's|^\([a-zA-Z]\):|$(WINENV_PREFIX)/\1|g' \
-e 's|^\([a-zA-Z]\):|$(UNIX_PATH_PREFIX)/\1|g' \
-e '\|$(TOPDIR)|I !d' \
-e 's|$$$$| \\|g' \
#
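A hedged before/after for the pattern above: one cl.exe /showIncludes line pushed through an equivalent stripped-down sed (cygwin prefix assumed, \r stripping and TOPDIR filtering omitted; the header path is invented):
  echo 'Note: including file:  C:\ws\jdk\src\java.base\share\native\libjava\foo.h' | \
    sed -e 's|Note: including file: *||' \
        -e 's|\\|/|g' \
        -e 's|^\([a-zA-Z]\):|/cygdrive/\1|g' \
        -e 's|$| \\|'
  # /cygdrive/C/ws/jdk/src/java.base/share/native/libjava/foo.h \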
@@ -231,25 +237,10 @@ ifeq ($(ALLOW_ABSOLUTE_PATHS_IN_OUTPUT)-$(FILE_MACRO_CFLAGS), false-)
) \
)
# When compiling with relative paths, the deps file may come out with relative
# paths, and that path may start with './'. First remove any leading ./, then
# add WORKSPACE_ROOT to any line not starting with /, while allowing for
# leading spaces. There may also be multiple entries on the same line, so start
# with splitting such lines.
# Non-GNU sed (BSD on macosx) cannot substitute in a literal \n using regex.
# Instead use a bash escaped literal newline. To avoid having unmatched quotes
# ruin the ability for an editor to properly syntax highlight this file, define
# that newline sequence as a separate variable and add the closing quote behind
# a comment.
sed_newline := \'$$'\n''#'
# When compiling with relative paths, the deps file comes out with relative
# paths.
define fix-deps-file
$(SED) \
-e 's|\([^ ]\) \{1,\}\([^\\:]\)|\1 \\$(sed_newline) \2|g' \
$1.tmp \
| $(SED) \
-e 's|^\([ ]*\)\./|\1|' \
-e '/^[ ]*[^/ ]/s|^\([ ]*\)|\1$(WORKSPACE_ROOT)/|' \
> $1
$(SED) -e 's|^\([ ]*\)|\1$(WORKSPACE_ROOT)|' $1.tmp > $1
endef
else
# By default the MakeCommandRelative macro does nothing.
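The line-splitting sed in fix-deps-file above, reduced to a GNU-sed one-liner for illustration (the real macro uses the bash-quoted literal newline to stay portable to BSD sed; the deps line is made up):
  printf 'foo.o: a.h  b.h c.h\n' | \
    sed -e 's|\([^ ]\) \{1,\}\([^\\:]\)|\1 \\\n \2|g'
  # foo.o: \
  #  a.h \
  #  b.h \
  #  c.h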
@@ -349,36 +340,42 @@ define SetupCompileNativeFileBody
$1_FLAGS := $(CFLAGS_CCACHE) $$($1_USE_PCH_FLAGS) $$($1_BASE_CFLAGS) \
$$($1_OPT_CFLAGS) $$($1_CFLAGS) -c
$1_COMPILER := $$($$($1_BASE)_CC)
$1_DEP_FLAG := $(C_FLAG_DEPS)
else ifneq ($$(filter %.m, $$($1_FILENAME)), )
# Compile as an Objective-C file
$1_FLAGS := -x objective-c $(CFLAGS_CCACHE) $$($1_USE_PCH_FLAGS) \
$$($1_BASE_CFLAGS) $$($1_OPT_CFLAGS) $$($1_CFLAGS) -c
$1_COMPILER := $$($$($1_BASE)_CC)
else ifneq ($$(filter %.S, $$($1_FILENAME)), )
# Compile as preprocessed assembler file
$1_FLAGS := $(BASIC_ASFLAGS) $$($1_BASE_ASFLAGS)
$1_DEP_FLAG := $(C_FLAG_DEPS)
else ifneq ($$(filter %.s %.S, $$($1_FILENAME)), )
# Compile as assembler file
$1_FLAGS := $$($1_BASE_ASFLAGS)
$1_COMPILER := $(AS)
$1_DEP_FLAG :=
else ifneq ($$(filter %.cpp %.cc %.mm, $$($1_FILENAME)), )
# Compile as a C++ or Objective-C++ file
$1_FLAGS := $(CFLAGS_CCACHE) $$($1_USE_PCH_FLAGS) $$($1_BASE_CXXFLAGS) \
$$($1_OPT_CXXFLAGS) $$($1_CXXFLAGS) -c
$1_COMPILER := $$($$($1_BASE)_CXX)
$1_DEP_FLAG := $(CXX_FLAG_DEPS)
else
$$(error Internal error in NativeCompilation.gmk: no compiler for file $$($1_FILENAME))
endif
# And this is the dependency file for this obj file.
$1_DEPS_FILE := $$(patsubst %$(OBJ_SUFFIX),%.d,$$($1_OBJ))
# The dependency target file lists all dependencies as empty targets to
# avoid make error "No rule to make target" for removed files
$1_DEPS_TARGETS_FILE := $$(patsubst %$(OBJ_SUFFIX),%.d.targets,$$($1_OBJ))
ifeq ($$(filter %.s %.S, $$($1_FILENAME)), )
# And this is the dependency file for this obj file.
$1_DEPS_FILE := $$(patsubst %$(OBJ_SUFFIX),%.d,$$($1_OBJ))
# The dependency target file lists all dependencies as empty targets to
# avoid make error "No rule to make target" for removed files
$1_DEPS_TARGETS_FILE := $$(patsubst %$(OBJ_SUFFIX),%.d.targets,$$($1_OBJ))
# Only try to load individual dependency information files if the global
# file hasn't been loaded (could happen if make was interrupted).
ifneq ($$($$($1_BASE)_DEPS_FILE_LOADED), true)
# Include previously generated dependency information. (if it exists)
-include $$($1_DEPS_FILE)
-include $$($1_DEPS_TARGETS_FILE)
# Only try to load individual dependency information files if the global
# file hasn't been loaded (could happen if make was interrupted).
ifneq ($$($$($1_BASE)_DEPS_FILE_LOADED), true)
# Include previously generated dependency information. (if it exists)
-include $$($1_DEPS_FILE)
-include $$($1_DEPS_TARGETS_FILE)
endif
endif
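# Sketch of the empty-target trick referred to above (file names invented):
#   obj/foo.d contains:          obj/foo.o: src/foo.c src/foo.h
#   obj/foo.d.targets contains:  src/foo.c:
#                                src/foo.h:
# Including both means that if src/foo.h is later deleted, the empty target
# keeps make from stopping with "No rule to make target 'src/foo.h'", and
# obj/foo.o is simply recompiled.
-include obj/foo.d
-include obj/foo.d.targets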
ifneq ($$(strip $$($1_CFLAGS) $$($1_CXXFLAGS) $$($1_OPTIMIZATION)), )
@@ -399,7 +396,7 @@ define SetupCompileNativeFileBody
$$(call MakeDir, $$(@D))
ifneq ($(TOOLCHAIN_TYPE), microsoft)
$$(call ExecuteWithLog, $$@, $$(call MakeCommandRelative, \
$$($1_COMPILER) $$(GENDEPS_FLAGS) \
$$($1_COMPILER) $$($1_DEP_FLAG) \
$$(addsuffix .tmp, $$($1_DEPS_FILE)) \
$$($1_COMPILE_OPTIONS)))
ifneq ($$($1_DEPS_FILE), )
@@ -418,25 +415,15 @@ define SetupCompileNativeFileBody
# Keep as much as possible on one execution line for best performance
# on Windows. There is no need to save the exit code from the compilation,
# since pipefail is always active on Windows.
ifeq ($$(filter %.S, $$($1_FILENAME)), )
$$(call ExecuteWithLog, $$@, $$(call MakeCommandRelative, \
$$($1_COMPILER) -showIncludes $$($1_COMPILE_OPTIONS))) \
| $(TR) -d '\r' | $(GREP) -v -e "^Note: including file:" \
-e "^$$($1_FILENAME)$$$$" || test "$$$$?" = "1" ; \
$(ECHO) $$@: \\ > $$($1_DEPS_FILE) ; \
$(SED) $(WINDOWS_SHOWINCLUDE_SED_PATTERN) $$($1_OBJ).log \
| $(SORT) -u >> $$($1_DEPS_FILE) ; \
$(ECHO) >> $$($1_DEPS_FILE) ; \
$(SED) $(DEPENDENCY_TARGET_SED_PATTERN) $$($1_DEPS_FILE) > $$($1_DEPS_TARGETS_FILE)
else
# For assembler calls just create empty dependency lists
$$(call ExecuteWithLog, $$@, $$(call MakeCommandRelative, \
$$($1_COMPILER) $$($1_FLAGS) \
$(CC_OUT_OPTION)$$($1_OBJ) -Ta $$($1_SRC_FILE))) \
| $(TR) -d '\r' | $(GREP) -v -e "Assembling:" || test "$$$$?" = "1" ; \
$(ECHO) > $$($1_DEPS_FILE) ; \
$(ECHO) > $$($1_DEPS_TARGETS_FILE)
endif
$$(call ExecuteWithLog, $$@, $$(call MakeCommandRelative, \
$$($1_COMPILER) -showIncludes $$($1_COMPILE_OPTIONS))) \
| $(TR) -d '\r' | $(GREP) -v -e "^Note: including file:" \
-e "^$$($1_FILENAME)$$$$" || test "$$$$?" = "1" ; \
$(ECHO) $$@: \\ > $$($1_DEPS_FILE) ; \
$(SED) $(WINDOWS_SHOWINCLUDE_SED_PATTERN) $$($1_OBJ).log \
| $(SORT) -u >> $$($1_DEPS_FILE) ; \
$(ECHO) >> $$($1_DEPS_FILE) ; \
$(SED) $(DEPENDENCY_TARGET_SED_PATTERN) $$($1_DEPS_FILE) > $$($1_DEPS_TARGETS_FILE)
endif
endif
endef
@@ -540,7 +527,7 @@ define SetupNativeCompilationBody
ifeq ($$($1_TYPE), EXECUTABLE)
$1_PREFIX :=
ifeq ($$($1_SUFFIX), )
$1_SUFFIX := $(EXECUTABLE_SUFFIX)
$1_SUFFIX := $(EXE_SUFFIX)
endif
else
$1_PREFIX := $(LIBRARY_PREFIX)
@@ -818,15 +805,13 @@ define SetupNativeCompilationBody
-include $$($1_PCH_DEPS_TARGETS_FILE)
$1_PCH_COMMAND := $$($1_CC) $$($1_CFLAGS) $$($1_EXTRA_CFLAGS) $$($1_SYSROOT_CFLAGS) \
$$($1_OPT_CFLAGS) -x c++-header -c $(GENDEPS_FLAGS) \
$$(addsuffix .tmp, $$($1_PCH_DEPS_FILE))
$$($1_OPT_CFLAGS) -x c++-header -c $(C_FLAG_DEPS) $$($1_PCH_DEPS_FILE)
$$($1_PCH_FILE): $$($1_PRECOMPILED_HEADER) $$($1_COMPILE_VARDEPS_FILE)
$$(call LogInfo, Generating precompiled header)
$$(call MakeDir, $$(@D))
$$(call ExecuteWithLog, $$@, $$(call MakeCommandRelative, \
$$($1_PCH_COMMAND) $$< -o $$@))
$$(call fix-deps-file, $$($1_PCH_DEPS_FILE))
$(SED) $(DEPENDENCY_TARGET_SED_PATTERN) $$($1_PCH_DEPS_FILE) \
> $$($1_PCH_DEPS_TARGETS_FILE)
@@ -978,13 +963,6 @@ define SetupNativeCompilationBody
$(CD) $$($1_SYMBOLS_DIR) && \
$$($1_OBJCOPY) --add-gnu-debuglink=$$($1_DEBUGINFO_FILES) $$($1_TARGET)
else ifeq ($(call isTargetOs, aix), true)
# AIX does not provide the equivalent of OBJCOPY to extract debug symbols,
# so we copy the compiled object with symbols to the .debuginfo file, which
# happens prior to the STRIP_CMD on the original target object file.
$1_DEBUGINFO_FILES := $$($1_SYMBOLS_DIR)/$$($1_NOSUFFIX).debuginfo
$1_CREATE_DEBUGINFO_CMDS := $(CP) $$($1_TARGET) $$($1_DEBUGINFO_FILES)
else ifeq ($(call isTargetOs, macosx), true)
$1_DEBUGINFO_FILES := \
$$($1_SYMBOLS_DIR)/$$($1_BASENAME).dSYM/Contents/Info.plist \
@@ -1168,9 +1146,6 @@ define SetupNativeCompilationBody
test "$$$$?" = "1" ; \
$$($1_CREATE_DEBUGINFO_CMDS)
$$($1_STRIP_CMD)
ifeq ($(call isBuildOsEnv, windows.wsl2), true)
$$(CHMOD) +x $$($1_TARGET)
endif
else
$$(call ExecuteWithLog, $$($1_OBJECT_DIR)/$$($1_SAFE_NAME)_link, \
$$(if $$($1_LINK_OBJS_RELATIVE), $$(CD) $$(OUTPUTDIR) ; ) \
@@ -1188,7 +1163,7 @@ define SetupNativeCompilationBody
# This only works if the openjdk_codesign identity is present on the system.
# Let it fail silently otherwise.
ifneq ($(CODESIGN), )
$(CODESIGN) -f -s "$(MACOSX_CODESIGN_IDENTITY)" --timestamp --options runtime \
$(CODESIGN) -s "$(MACOSX_CODESIGN_IDENTITY)" --timestamp --options runtime \
--entitlements $$(call GetEntitlementsFile, $$@) $$@
endif
endif
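# For context, with CODESIGN=codesign and the openjdk_codesign identity, the
# rule above effectively runs something like (paths are placeholders):
#   codesign -f -s "openjdk_codesign" --timestamp --options runtime \
#       --entitlements <entitlements.plist> <built binary>
# --timestamp and the hardened runtime (--options runtime) are what Apple
# notarization requires; -f (present on one side of this hunk) forces
# re-signing when a signature already exists.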


@@ -81,7 +81,7 @@ define SetupTestFilesCompilationBody
# Locate all files with the matching prefix
$1_FILE_LIST := \
$$(call FindFiles, $$($1_SOURCE_DIRS), $$($1_PREFIX)*.c $$($1_PREFIX)*.cpp $$($1_PREFIX)*.m)
$$(call FindFiles, $$($1_SOURCE_DIRS), $$($1_PREFIX)*.c $$($1_PREFIX)*.cpp)
$1_EXCLUDE_PATTERN := $$(addprefix %/, $$($1_EXCLUDE))
$1_FILTERED_FILE_LIST := $$(filter-out $$($1_EXCLUDE_PATTERN), $$($1_FILE_LIST))
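# Usage sketch (prefix and exclude values invented): with
#   $1_PREFIX  := exe
#   $1_EXCLUDE := exebadtest.c
# FindFiles collects every exe*.c / exe*.cpp under $1_SOURCE_DIRS, the exclude
# list becomes the pattern %/exebadtest.c, and the filter-out above drops that
# file from the compiled set.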


@@ -1,5 +1,5 @@
#
# Copyright (c) 2013, 2020, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2013, 2019, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -177,7 +177,7 @@ define SetupTextFileProcessingBody
$1_INCLUDES_PARTIAL_AWK := $$(subst $$(SPACE);,,$$(subst $$(SPACE)=>$$(SPACE),"$$(RIGHT_PAREN)$$(RIGHT_PAREN) \
{ include$$(LEFT_PAREN)",$$(subst $$(SPACE);$$(SPACE),"$$(RIGHT_PAREN) } \
else if $$(LEFT_PAREN)matches$$(LEFT_PAREN)",$$(strip $$($1_INCLUDES)))))
$1_INCLUDES_COMMAND_LINE := $(AWK) '$$($1_INCLUDES_HEADER_AWK) \
$1_INCLUDES_COMMAND_LINE := $(NAWK) '$$($1_INCLUDES_HEADER_AWK) \
{ if (matches("$$($1_INCLUDES_PARTIAL_AWK)") } else print }'
else
# We don't have any includes, just pipe the file through cat.


@@ -92,58 +92,40 @@ SetIfEmpty = \
# foo/bar/baz, /foo/bar -> <empty>
#
# The x prefix is used to preserve the presence of the initial slash
# On Windows, paths are treated as case-insensitive
#
# $1 - Path to compare
# $2 - Other path to compare
FindCommonPathPrefix = \
$(call DecodeSpace,$(patsubst x%,%,$(subst $(SPACE),/,$(strip \
$(call FindCommonPathPrefixHelper1, \
$(subst /,$(SPACE),x$(call EncodeSpace,$(strip $1))), \
$(subst /,$(SPACE),x$(call EncodeSpace,$(strip $2)))) \
))))
$(patsubst x%,%,$(subst $(SPACE),/,$(strip \
$(call FindCommonPathPrefixHelper, \
$(subst /,$(SPACE),x$(strip $1)), $(subst /,$(SPACE),x$(strip $2))) \
)))
FindCommonPathPrefixHelper1 = \
$(if $(filter $(OPENJDK_TARGET_OS), windows), \
$(call FindCommonPathPrefixHelper2,$(call uppercase,$1),$(call uppercase,$2),$1), \
$(call FindCommonPathPrefixHelper2,$1,$2,$1))
FindCommonPathPrefixHelper2 = \
FindCommonPathPrefixHelper = \
$(if $(call equals, $(firstword $1), $(firstword $2)), \
$(if $(call equals, $(firstword $1),),, \
$(firstword $3) \
$(call FindCommonPathPrefixHelper2, \
$(wordlist 2, $(words $1), $1), \
$(wordlist 2, $(words $2), $2), \
$(wordlist 2, $(words $3), $3) \
) \
$(firstword $1) \
$(call FindCommonPathPrefixHelper, \
$(wordlist 2, $(words $1), $1), $(wordlist 2, $(words $2), $2) \
) \
)
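# Usage sketch (paths invented; the empty-result case is the one shown in the
# comment above):
#   $(call FindCommonPathPrefix, /foo/bar/baz, /foo/bar/banan)  ->  /foo/bar
#   $(call FindCommonPathPrefix, foo/bar/baz, /foo/bar)         ->  <empty>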
# Convert a partial path into as many directory levels of ../, removing any
# leading and trailing /.
# Ex: foo/bar/baz/ -> ../../..
# foo/bar -> ../..
# /foo -> ..
DirToDotDot = \
$(subst $(SPACE),/,$(foreach d, $(subst /,$(SPACE),$1),..))
# Computes the relative path from a directory to a file
# $1 - File to compute the relative path to
# $2 - Directory to compute the relative path from
RelativePath = \
$(call DecodeSpace,$(strip $(call RelativePathHelper,$(call EncodeSpace \
,$(strip $1)),$(call EncodeSpace \
,$(strip $2)),$(call EncodeSpace \
,$(call FindCommonPathPrefix,$1,$2)))))
RelativePathHelper = \
$(eval $3_prefix_length := $(words $(subst /,$(SPACE),$3))) \
$(eval $1_words := $(subst /,$(SPACE),$1)) \
$(eval $2_words := $(subst /,$(SPACE),$2)) \
$(if $(call equals,$($3_prefix_length),0),, \
$(eval $1_words := $(wordlist 2,$(words $($1_words)),$(wordlist \
$($3_prefix_length),$(words $($1_words)),$($1_words)))) \
$(eval $2_words := $(wordlist 2,$(words $($2_words)),$(wordlist \
$($3_prefix_length),$(words $($2_words)),$($2_words)))) \
) \
$(eval $1_suffix := $(subst $(SPACE),/,$($1_words))) \
$(eval $2_dotdots := $(subst $(SPACE),/,$(foreach d,$($2_words),..))) \
$(if $($1_suffix), \
$(if $($2_dotdots), $($2_dotdots)/$($1_suffix), $($1_suffix)), \
$(if $($2_dotdots), $($2_dotdots), .))
$(eval $1_prefix := $(call FindCommonPathPrefix, $1, $2)) \
$(eval $1_dotdots := $(call DirToDotDot, $(patsubst $($(strip $1)_prefix)%, %, $2))) \
$(eval $1_dotdots := $(if $($(strip $1)_dotdots),$($(strip $1)_dotdots),.)) \
$(eval $1_suffix := $(patsubst $($(strip $1)_prefix)/%, %, $1)) \
$($(strip $1)_dotdots)/$($(strip $1)_suffix)
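# Usage sketch (paths invented):
#   $(call RelativePath, /ws/src/hotspot/os/bsd/foo.cpp, /ws/build/obj)
#     ->  ../../src/hotspot/os/bsd/foo.cpp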
################################################################################
# Filter out duplicate substrings while preserving order. Keeps the first occurrence.
@@ -296,7 +278,7 @@ FindExecutableDirForModule = \
# Param 1: a space-separated list of classpath entries
# The surrounding strip is needed to keep additional whitespace out
PathList = \
"$(subst $(SPACE),:,$(strip $(subst $(DQUOTE),,$1)))"
"$(subst $(SPACE),$(PATH_SEP),$(strip $(subst $(DQUOTE),,$1)))"
################################################################################
# Check if a specified hotspot variant is being built, or at least one of a

Some files were not shown because too many files have changed in this diff.