diff --git a/BUILDING.md b/BUILDING.md
index cf563ee61..e3502c0a8 100644
--- a/BUILDING.md
+++ b/BUILDING.md
@@ -41,6 +41,9 @@ Override build-steps ? (leave empty to run pre-configured steps): j2v8
The J2V8 build-system performs several build steps in a fixed order to produce the final J2V8 packages for usage on the designated target platforms. What follows is a short summary of what each of the executed build-steps does and which output artifacts each of them produces.
+```
+Node.js --> CMake --> JNI --> C++ --> Optimize --> Java/Android --> JUnit
+```
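+
+The same build-steps can also be triggered programmatically via the build-executor API. A minimal sketch (mirroring how the build-system's own tests in `build_system/tests/` invoke builds):
+
+```
+import constants as c
+import build_executor as bex
+
+# run only the CMake and the JNI-header generation steps for a Linux x64 build
+bex.execute_build({
+    "target": c.target_linux,
+    "arch": c.arch_x64,
+    "buildsteps": [c.build_j2v8_cmake, c.build_j2v8_jni],
+})
+```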
---
## Node.js
@@ -56,9 +59,10 @@ __Inputs:__
__Artifacts:__
- Node.js & V8 static link libraries
- `./node/out/`
- - `./node/build/`
- - `./node/Debug/`
- - `./node/Release/`
+ - *win32 specific*
+ - `./node/build/`
+ - `./node/Debug/`
+ - `./node/Release/`
---
## CMake
@@ -75,12 +79,25 @@ __Artifacts:__
- CMake generated Makefiles / IDE Project-files
- `./cmake.out/{platform}.{architecture}/`
---
-## JNI
+## JNI Header Generation
+
+Generate the JNI glue header file from the native method definitions of the Java `V8` class.
+
+__Inputs__:
+- Java V8.class file
+ - `./target/classes/com/eclipsesource/v8/V8.class`
+
+__Artifacts:__
+- J2V8 C++ JNI header file
+ - `./jni/com_eclipsesource_v8_V8Impl.h`
+---
+## C++
-Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8.
+Compile and link the J2V8 native shared libraries (.so/.dylib/.dll), which contain the C++ JNI bridge code to interop with the embedded Node.js / V8 components.
__Inputs__:
- CMake generated Makefiles / IDE Project-files
+- Node.js / V8 static link libraries & C++ header files
- J2V8 C++ JNI source code
- `./jni/com_eclipsesource_v8_V8Impl.h`
- `./jni/com_eclipsesource_v8_V8Impl.cpp`
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 308fead1a..0f16ad7aa 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -34,8 +34,8 @@ set(J2V8_JDK_DIR ${Java_ROOT} CACHE STRING "Path to the Java JDK dependency")
set(J2V8_NODEJS_DIR "${CMAKE_SOURCE_DIR}/node" CACHE STRING "Path to the Node.js dependency")
# get the required Node.js link libraries
-get_njs_libs(${J2V8_NODEJS_DIR} "Debug")
-get_njs_libs(${J2V8_NODEJS_DIR} "Release")
+get_njs_libs(${J2V8_NODEJS_DIR} "Debug" FALSE)
+get_njs_libs(${J2V8_NODEJS_DIR} "Release" TRUE)
# j2v8 build options
set(J2V8_TARGET_ARCH "" CACHE STRING "The target architecture for the build.")
diff --git a/build_system/.gitignore b/build_system/.gitignore
new file mode 100644
index 000000000..77efafb9a
--- /dev/null
+++ b/build_system/.gitignore
@@ -0,0 +1 @@
+test-reports
diff --git a/build_system/build_constants.py b/build_system/build_constants.py
index d3875d989..82d763919 100644
--- a/build_system/build_constants.py
+++ b/build_system/build_constants.py
@@ -19,7 +19,8 @@
CLIStep(c.build_node_js, " Builds the Node.js & V8 dependency artifacts that are later linked into the J2V8 native bridge code.\n" +
" (only works if the Node.js source was checked out into the J2V8 ./node directory)"),
CLIStep(c.build_j2v8_cmake, " Uses CMake to generate the native Makefiles / IDE project files to later build the J2V8 C++ native bridge shared libraries."),
- CLIStep(c.build_j2v8_jni, " Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8."),
+    CLIStep(c.build_j2v8_jni, " Generate the J2V8 JNI C++ header file."),
+ CLIStep(c.build_j2v8_cpp, " Compile and link the J2V8 C++ shared libraries (.so/.dylib/.dll), which provide the JNI bridge to interop with the C++ code of Node.js / V8."),
CLIStep(c.build_j2v8_optimize, " The native J2V8 libraries are optimized for performance and/or filesize by using the available tools of the target-platform / compiler-toolchain."),
CLIStep(c.build_j2v8_java, " Compiles the Java source code and packages it, including the previously built native libraries, into the final package artifacts.\n" +
" For the execution of this build-step Maven (Java) or Gradle (Android) are used for the respective target platforms."),
diff --git a/build_system/build_executor.py b/build_system/build_executor.py
index 67ad87b57..8112c43e6 100644
--- a/build_system/build_executor.py
+++ b/build_system/build_executor.py
@@ -8,13 +8,14 @@
from shell_build import ShellBuildSystem
import immutable
-# collection of all parsed build-steps that will then be passed on to the core build function
-# (this list must only contain atomic steps after all step evaluations are finished)
-parsed_steps = set()
+class BuildState:
+ # collection of all parsed build-steps that will then be passed on to the core build function
+ # (this list must only contain atomic steps after all step evaluations are finished)
+ parsed_steps = set()
-# a registry/dictionary of evaluation-functions that translate from their corresponding step/alias
-# into the list of atomic build-steps (see parsed_steps above)
-step_evaluators = {}
+ # a registry/dictionary of evaluation-functions that translate from their corresponding step/alias
+ # into the list of atomic build-steps (see parsed_steps above)
+ step_evaluators = {}
#-----------------------------------------------------------------------
# Advanced build-step parsing (anti-steps, multi-steps)
@@ -28,11 +29,14 @@ def atomic_step(step, alias = None):
if (alias is None):
alias = step
+ step_eval = BuildState.step_evaluators
+ parsed_steps = BuildState.parsed_steps
+
# add step handler (step => step)
- step_evaluators[alias] = lambda: parsed_steps.add(step)
+ step_eval[alias] = lambda: parsed_steps.add(step)
# add anti-step handler (step => ~step)
- step_evaluators["~" + alias] = lambda: parsed_steps.discard(step)
+ step_eval["~" + alias] = lambda: parsed_steps.discard(step)
# register additional anti-step in CLI
bc.avail_build_steps.append("~" + alias)
@@ -43,15 +47,18 @@ def multi_step(alias, include, exclude = []):
the defined step alias name was detected. Also the inverted anti-steps sequence
will be evaluated if the "~" prefixed alias is recognized.
"""
+
+ step_eval = BuildState.step_evaluators
+
# add aliased step-sequence (alias => step1, step2, ... , stepN)
- step_evaluators[alias] = lambda: \
- [step_evaluators.get(s)() for s in include] + \
- [step_evaluators.get("~" + s)() for s in exclude]
+ step_eval[alias] = lambda: \
+ [step_eval.get(s)() for s in include] + \
+ [step_eval.get("~" + s)() for s in exclude]
# add aliased anti-step-sequence (~alias => ~step1, ~step2, ... , ~stepN)
- step_evaluators["~" + alias] = lambda: \
- [step_evaluators.get("~" + s)() for s in include] + \
- [step_evaluators.get(s)() for s in exclude]
+ step_eval["~" + alias] = lambda: \
+ [step_eval.get("~" + s)() for s in include] + \
+ [step_eval.get(s)() for s in exclude]
# register additional anti-step in CLI
bc.avail_build_steps.append("~" + alias)
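+
+# example: init_buildsteps() below registers aliases such as "j2v8"; passing "j2v8"
+# on the CLI adds all of its atomic steps to the parsed set, and combining it with
+# an anti-step like "~j2v8optimize" discards a single step from that set again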
@@ -74,6 +81,7 @@ def init_buildsteps():
c.build_node_js,
c.build_j2v8_cmake,
c.build_j2v8_jni,
+ c.build_j2v8_cpp,
c.build_j2v8_optimize,
])
@@ -83,11 +91,11 @@ def init_buildsteps():
def evaluate_build_step_option(step):
"""Find the registered evaluator function for the given step and execute it"""
- step_eval_func = step_evaluators.get(step, raise_unhandled_option(step))
+ step_eval_func = BuildState.step_evaluators.get(step, raise_unhandled_option(step))
step_eval_func()
def raise_unhandled_option(step):
- return lambda: sys.exit("INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step + "\"")
+ return lambda: utils.cli_exit("INTERNAL-ERROR: Tried to handle unrecognized build-step \"" + step + "\"")
# initialize the advanced parsing evaluation handlers for the build.py CLI
init_buildsteps()
@@ -115,35 +123,38 @@ def execute_build(params):
if (isinstance(params, dict)):
params = cli.BuildParams(params)
+ # can be used to force output of all started sub-processes through the host-process stdout
+ utils.redirect_stdout_enabled = hasattr(params, "redirect_stdout") and params.redirect_stdout
+
if (params.target is None):
- sys.exit("ERROR: No target platform specified")
+ utils.cli_exit("ERROR: No target platform specified")
if (params.docker and params.vagrant):
- sys.exit("ERROR: Choose either Docker or Vagrant for the build, can not use both")
+ utils.cli_exit("ERROR: Choose either Docker or Vagrant for the build, can not use both")
target = params.target
if (not target in bc.platform_configs):
- sys.exit("ERROR: Unrecognized target platform: " + target)
+ utils.cli_exit("ERROR: Unrecognized target platform: " + target)
# this defines the PlatformConfig / operating system the build should be run for
target_platform = bc.platform_configs.get(target)
if (params.arch is None):
- sys.exit("ERROR: No target architecture specified")
+ utils.cli_exit("ERROR: No target architecture specified")
avail_architectures = target_platform.architectures
if (not params.arch in avail_architectures):
- sys.exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target)
+ utils.cli_exit("ERROR: Unsupported architecture: \"" + params.arch + "\" for selected target platform: " + target)
if (params.buildsteps is None):
- sys.exit("ERROR: No build-step specified, valid values are: " + ", ".join(bc.avail_build_steps))
+ utils.cli_exit("ERROR: No build-step specified, valid values are: " + ", ".join(bc.avail_build_steps))
if (not params.buildsteps is None and not isinstance(params.buildsteps, list)):
params.buildsteps = [params.buildsteps]
- global parsed_steps
+ parsed_steps = BuildState.parsed_steps
parsed_steps.clear()
# go through the raw list of build-steps (given by the CLI or an API call)
@@ -155,7 +166,7 @@ def execute_build(params):
parsed_steps = [step for step in bc.atomic_build_step_sequence if step in parsed_steps]
if (len(parsed_steps) == 0):
- sys.exit("WARNING: No build-steps to be done ... exiting")
+ utils.cli_exit("WARNING: No build-steps to be done ... exiting")
build_cwd = utils.get_cwd()
@@ -168,7 +179,7 @@ def execute_build(params):
# try to find the configuration parameters to run the cross-compiler
if (cross_sys):
if (cross_configs.get(cross_sys) is None):
- sys.exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_sys + "'")
+ utils.cli_exit("ERROR: target '" + target + "' does not have a recognized cross-compile host: '" + cross_sys + "'")
else:
cross_cfg = cross_configs.get(cross_sys)
@@ -231,14 +242,14 @@ def execute_build_step(build_system, build_step):
cross_cfg = cross_configs.get(params.cross_agent)
if (cross_cfg is None):
- sys.exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent)
+ utils.cli_exit("ERROR: internal error while looking for cross-compiler config: " + params.cross_agent)
build_cwd = cross_cfg.build_cwd
# execute all steps from a list that was parsed / evaluated before (see the "build-step parsing" section above)
for step in parsed_steps:
if (not step in build_steps):
- print("INFO: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")")
+ print("WARNING: skipping build step \"" + step + "\" (not configured and/or supported for platform \"" + params.target + "\")")
continue
target_step = build_steps[step]
diff --git a/build_system/build_interactive.py b/build_system/build_interactive.py
index 30e1f1722..6f0300595 100644
--- a/build_system/build_interactive.py
+++ b/build_system/build_interactive.py
@@ -3,6 +3,7 @@
import build_configs as bcfg
import build_executor as bex
+import build_utils as utils
def run_interactive_cli():
idx = 0
@@ -20,7 +21,7 @@ def run_interactive_cli():
else input("Select a predefined build-configuration to run: ")
if not isinstance(sel_index, int) or sel_index < 0 or sel_index >= len(bcfg.configs):
-        sys.exit("ERROR: Must enter a valid test index in the range [0 ... " + str(len(bcfg.configs)) + "]")
+        utils.cli_exit("ERROR: Must enter a valid test index in the range [0 ... " + str(len(bcfg.configs) - 1) + "]")
sel_cfg = bcfg.configs[sel_index]
diff --git a/build_system/build_structures.py b/build_system/build_structures.py
index 40a71b5df..32810acfa 100644
--- a/build_system/build_structures.py
+++ b/build_system/build_structures.py
@@ -33,7 +33,7 @@ def cross_compiler(self, cross_host_name):
compiler = self.cross_compilers.get(cross_host_name)
if (not compiler):
- sys.exit("ERROR: internal error while looking for cross-compiler: " + cross_host_name)
+ utils.cli_exit("ERROR: internal error while looking for cross-compiler: " + cross_host_name)
return compiler()
diff --git a/build_system/build_utils.py b/build_system/build_utils.py
index 1698c81bd..66083c409 100644
--- a/build_system/build_utils.py
+++ b/build_system/build_utils.py
@@ -15,6 +15,10 @@ def get_cwd():
def host_cmd_sep():
return "&& " if os.name == "nt" else "; "
+def touch(filename, times=None):
+ with open(filename, 'a'):
+ os.utime(filename, times)
+
def is_android(platform):
return c.target_android in platform
@@ -38,18 +42,109 @@ def platform_libext(config):
return lib_ext
+def cli_exit(message):
+ """
+ sys.exit() messages are not picked up correctly when unit-testing.
+ Use this function instead!
+ """
+ sys.stderr.write(message + "\n")
+ sys.stderr.flush()
+ sys.exit(1)
+
+# based on code from: https://stackoverflow.com/a/16260159/425532
+def readlines(f, newlines):
+    buf = ""
+    while True:
+        def get_pos():
+            # find the first of the given delimiters that occurs in the buffer
+            pos = None
+            nl = None
+            for n in newlines:
+                if pos is not None:
+                    break
+                try:
+                    pos = buf.index(n)
+                except ValueError:
+                    pass
+
+                if pos is not None:
+                    nl = n
+
+            return (pos, nl)
+
+        pos, nl = get_pos()
+
+        # NOTE: "pos is not None" is required, a delimiter at index 0 must also match
+        while pos is not None:
+            yield buf[:pos] + nl
+            buf = buf[pos + len(nl):]
+            pos, nl = get_pos()
+
+        chunk = f.read(1)
+
+        if chunk == ":":
+            # read another char to make sure we catch the ": " delimiter
+            buf += chunk
+            chunk = f.read(1)
+
+        if not chunk:
+            yield buf
+            break
+        buf += chunk
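+
+# example (sketch): for a sub-process that prints "password: " without a trailing
+# newline, readlines(proc.stdout, [": ", ":", "\n"]) yields "password: " as soon as
+# it was read, so the prompt reaches the console before the input is awaited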
+
+redirect_stdout_enabled = False
+
def execute(cmd, cwd = None):
"""
Low-Level CLI utility function to execute a shell command in a sub-process of the current python process
- (redirects all output to stdout)
+ (redirects all output to the host-process stdout if redirect_stdout_enabled is True)
"""
- # flush any buffered console output, because popen could block the terminal
- sys.stdout.flush()
-
- p = subprocess.Popen(cmd, universal_newlines=True, shell=True, cwd=cwd)
- return_code = p.wait()
- if return_code:
- raise subprocess.CalledProcessError(return_code, cmd)
+ if not redirect_stdout_enabled:
+ # flush any buffered console output, because popen could block the terminal
+ sys.stdout.flush()
+
+ p = subprocess.Popen(cmd, universal_newlines=True, shell=True, cwd=cwd)
+ return_code = p.wait()
+
+ if return_code:
+ raise subprocess.CalledProcessError(return_code, cmd)
+ else:
+ # see: https://stackoverflow.com/a/22049757/425532
+ # this way of running the process and handling the process output is important because
+ # when running unit-tests in python or running e.g. a docker process, if the
+ # output does not directly go through the stdout of the python process,
+ # then it will not be picked up by some of the available unit-test runners
+
+ # flush any buffered console output, because popen could block the terminal
+ sys.stdout.flush()
+
+            p = subprocess.Popen(cmd,
+                                 shell=True,
+                                 cwd=cwd,
+                                 stdout=subprocess.PIPE,
+                                 stderr=subprocess.STDOUT)
+
+            # also look for ": " as an output separator, because Vagrant emits this
+            # right before some relevant user input is requested
+            # (this makes sure that all pending output is flushed before the input is awaited)
+ for line in readlines(p.stdout, [": ", ":", "\n"]):
+ sys.stdout.write(line)
+ sys.stdout.flush()
+
+ return_code = p.wait()
+
+ if return_code:
+ raise subprocess.CalledProcessError(return_code, cmd)
def execute_to_str(cmd, cwd = None):
"""
@@ -226,4 +321,4 @@ def check_node_builtins():
if (len(j2v8_missing) > 0):
error += "\n\t" + "J2V8 definition is missing node-modules: " + str(j2v8_missing)
- sys.exit(error)
+ cli_exit(error)
diff --git a/build_system/cli.py b/build_system/cli.py
index e7effbd28..3708d4de3 100644
--- a/build_system/cli.py
+++ b/build_system/cli.py
@@ -5,24 +5,48 @@
class BuildParams(object):
"""Value container for all build-parameters"""
- def __init__(self, d):
- self.target = d.get("target")
- self.arch = d.get("arch")
- self.vendor = d.get("vendor")
- self.keep_native_libs = d.get("keep_native_libs")
- self.node_enabled = d.get("node_enabled")
- self.docker = d.get("docker")
- self.vagrant = d.get("vagrant")
- self.sys_image = d.get("sys_image")
- self.no_shutdown = d.get("no_shutdown")
- self.buildsteps = d.get("buildsteps") or c.build_all
-
+ def __init__(self, param_dict):
+
+ known_params = {
+ "target": None,
+ "arch": None,
+ "vendor": None,
+ "keep_native_libs": None,
+ "node_enabled": None,
+ "docker": None,
+ "vagrant": None,
+ "sys_image": None,
+ "no_shutdown": None,
+ "redirect_stdout": None,
+ "buildsteps": c.build_all,
+ }
+
+ unhandled = set(param_dict.keys()).difference(set(known_params.keys()))
+
+ if any(unhandled):
+ raise Exception("Unhandled BuildParams: " + str(unhandled))
+
+ for param in known_params:
+ # try to read value from input
+ value = param_dict.get(param)
+
+            if value is not None:
+ # use input value
+ setattr(self, param, value)
+ else:
+ # use default value
+ default = known_params.get(param)
+ setattr(self, param, default)
+
+ # this should never be passed in by the user, it is used just internally
self.cross_agent = None
def init_args(parser):
"""Initialize all supported build.py parameters and commands on the CLI parser"""
+ #-----------------------------------------------------------------------
# Essential build settings
+ #-----------------------------------------------------------------------
parser.add_argument("--target", "-t",
help="The build target platform name (must be a valid platform string identifier).",
dest="target",
@@ -35,7 +59,9 @@ def init_args(parser):
required=True,
choices=bc.avail_architectures)
+ #-----------------------------------------------------------------------
# Optional build settings
+ #-----------------------------------------------------------------------
parser.add_argument("--vendor", "-v",
help="The operating system vendor (most relevant when building for a specific Linux distribution).",
dest="vendor")
@@ -47,7 +73,9 @@ def init_args(parser):
action="store_const",
const=True)
+ #-----------------------------------------------------------------------
# J2V8 Feature switches
+ #-----------------------------------------------------------------------
parser.add_argument("--node-enabled", "-ne",
help="Include the Node.js runtime and builtin node-modules for use in J2V8.",
dest="node_enabled",
@@ -55,7 +83,9 @@ def init_args(parser):
action="store_const",
const=True)
+ #-----------------------------------------------------------------------
# Docker / Vagrant cross-compile settings
+ #-----------------------------------------------------------------------
parser.add_argument("--docker", "-dkr",
help="Run a cross-compile environment in a Docker container (all required build-tools are then fully contained & virtualized).",
dest="docker",
@@ -80,7 +110,9 @@ def init_args(parser):
action="store_const",
const=True)
+ #-----------------------------------------------------------------------
# Meta-Args
+ #-----------------------------------------------------------------------
# NOTE: this option is only used internally to distinguish the running of the build script within
# the build-instigator and the actual build-executor (this is relevant when cross-compiling)
parser.add_argument("--cross-agent",
@@ -88,6 +120,16 @@ def init_args(parser):
dest="cross_agent",
type=str)
+ parser.add_argument("--redirect-stdout", "-rso",
+                        help="Make sure that the stdout/stderr of sub-processes running shell commands also goes through the " +
+                        "output interface of the python host process that is running the build.\n" +
+                        "(this is required when running tests for the build-system; without this option the output of the sub-processes will " +
+                        "not show up in the test logs)",
+ dest="redirect_stdout",
+ default=False,
+ action="store_const",
+ const=True)
+
parser.add_argument("--interactive", "-i",
help="Run the interactive version of the J2V8 build CLI.",
dest="interactive",
@@ -95,6 +137,9 @@ def init_args(parser):
action="store_const",
const=True)
+ #-----------------------------------------------------------------------
+ # Build-Steps
+ #-----------------------------------------------------------------------
parser.add_argument("buildsteps",
help="Pass a single build-step or a list of all the recognized build-steps that should be executed\n" +
"(the order of the steps given to the CLI does not matter, the correct order will be restored internally).\n\n" +
@@ -111,6 +156,6 @@ def init_args(parser):
def get_parser():
"""Get a CLI parser instance that accepts all supported build.py parameters and commands"""
- parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
+ parser = argparse.ArgumentParser(prog="build", formatter_class=argparse.RawTextHelpFormatter)
init_args(parser)
return parser
diff --git a/build_system/config_android.py b/build_system/config_android.py
index 0f33fdf75..54efc4900 100644
--- a/build_system/config_android.py
+++ b/build_system/config_android.py
@@ -62,13 +62,15 @@ def build_j2v8_cmake(config):
android_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake)
#-----------------------------------------------------------------------
-def build_j2v8_jni(config):
+android_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni)
+#-----------------------------------------------------------------------
+def build_j2v8_cpp(config):
return [
"cd " + u.cmake_out_dir,
"make -j4",
]
-android_config.build_step(c.build_j2v8_jni, build_j2v8_jni)
+android_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp)
#-----------------------------------------------------------------------
def build_j2v8_java(config):
return \
diff --git a/build_system/config_linux.py b/build_system/config_linux.py
index 418bb54f0..38c5e6594 100644
--- a/build_system/config_linux.py
+++ b/build_system/config_linux.py
@@ -1,6 +1,7 @@
import constants as c
from build_structures import PlatformConfig
from docker_build import DockerBuildSystem, DockerBuildStep
+import java_build_steps as j
import shared_build_steps as u
import cmake_utils as cmu
@@ -57,13 +58,15 @@ def build_j2v8_cmake(config):
linux_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake)
#-----------------------------------------------------------------------
-def build_j2v8_jni(config):
+linux_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni)
+#-----------------------------------------------------------------------
+def build_j2v8_cpp(config):
return [
"cd " + u.cmake_out_dir,
"make -j4",
]
-linux_config.build_step(c.build_j2v8_jni, build_j2v8_jni)
+linux_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp)
#-----------------------------------------------------------------------
def build_j2v8_optimize(config):
# NOTE: execstack / strip are not part of the alpine tools, therefore we just skip this step
@@ -78,21 +81,7 @@ def build_j2v8_optimize(config):
linux_config.build_step(c.build_j2v8_optimize, build_j2v8_optimize)
#-----------------------------------------------------------------------
-def build_j2v8_java(config):
- u.apply_maven_config_settings(config)
-
- return \
- u.clearNativeLibs(config) + \
- u.copyNativeLibs(config) + \
- u.setJavaHome(config) + \
- [u.build_cmd] + \
- u.copyOutput(config)
-
-linux_config.build_step(c.build_j2v8_java, build_j2v8_java)
+j.add_java_step(linux_config, c.build_j2v8_java, [u.java_build_cmd])
#-----------------------------------------------------------------------
-def build_j2v8_junit(config):
- return \
- [u.run_tests_cmd]
-
-linux_config.build_step(c.build_j2v8_junit, build_j2v8_junit)
+j.add_java_step(linux_config, c.build_j2v8_junit, [u.java_tests_cmd])
#-----------------------------------------------------------------------
diff --git a/build_system/config_macos.py b/build_system/config_macos.py
index d113c3942..c85723835 100644
--- a/build_system/config_macos.py
+++ b/build_system/config_macos.py
@@ -2,6 +2,7 @@
import constants as c
from build_structures import PlatformConfig
from vagrant_build import VagrantBuildSystem, VagrantBuildStep
+import java_build_steps as j
import shared_build_steps as u
import cmake_utils as cmu
@@ -57,28 +58,17 @@ def build_j2v8_cmake(config):
macos_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake)
#-----------------------------------------------------------------------
-def build_j2v8_jni(config):
+macos_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni)
+#-----------------------------------------------------------------------
+def build_j2v8_cpp(config):
return [
"cd " + u.cmake_out_dir,
"make -j4",
]
-macos_config.build_step(c.build_j2v8_jni, build_j2v8_jni)
+macos_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp)
#-----------------------------------------------------------------------
-def build_j2v8_java(config):
- u.apply_maven_config_settings(config)
-
- return \
- u.clearNativeLibs(config) + \
- u.copyNativeLibs(config) + \
- [u.build_cmd] + \
- u.copyOutput(config)
-
-macos_config.build_step(c.build_j2v8_java, build_j2v8_java)
+j.add_java_step(macos_config, c.build_j2v8_java, [u.java_build_cmd])
#-----------------------------------------------------------------------
-def build_j2v8_junit(config):
- return \
- [u.run_tests_cmd]
-
-macos_config.build_step(c.build_j2v8_junit, build_j2v8_junit)
+j.add_java_step(macos_config, c.build_j2v8_junit, [u.java_tests_cmd])
#-----------------------------------------------------------------------
diff --git a/build_system/config_win32.py b/build_system/config_win32.py
index 00708fae8..42ec65605 100644
--- a/build_system/config_win32.py
+++ b/build_system/config_win32.py
@@ -3,6 +3,7 @@
from build_structures import PlatformConfig
from docker_build import DockerBuildSystem, DockerBuildStep
from vagrant_build import VagrantBuildSystem, VagrantBuildStep
+import java_build_steps as j
import shared_build_steps as u
import cmake_utils as cmu
@@ -60,7 +61,9 @@ def build_j2v8_cmake(config):
win32_config.build_step(c.build_j2v8_cmake, build_j2v8_cmake)
#-----------------------------------------------------------------------
-def build_j2v8_jni(config):
+win32_config.build_step(c.build_j2v8_jni, u.build_j2v8_jni)
+#-----------------------------------------------------------------------
+def build_j2v8_cpp(config):
# show docker container memory usage / limit
show_mem = ["powershell C:/j2v8/docker/win32/mem.ps1"] if config.cross_agent == "docker" else []
@@ -72,22 +75,9 @@ def build_j2v8_jni(config):
] + \
show_mem
-win32_config.build_step(c.build_j2v8_jni, build_j2v8_jni)
+win32_config.build_step(c.build_j2v8_cpp, build_j2v8_cpp)
#-----------------------------------------------------------------------
-def build_j2v8_java(config):
- u.apply_maven_config_settings(config)
-
- return \
- u.clearNativeLibs(config) + \
- u.copyNativeLibs(config) + \
- [u.build_cmd] + \
- u.copyOutput(config)
-
-win32_config.build_step(c.build_j2v8_java, build_j2v8_java)
+j.add_java_step(win32_config, c.build_j2v8_java, [u.java_build_cmd])
#-----------------------------------------------------------------------
-def build_j2v8_junit(config):
- return \
- [u.run_tests_cmd]
-
-win32_config.build_step(c.build_j2v8_junit, build_j2v8_junit)
+j.add_java_step(win32_config, c.build_j2v8_junit, [u.java_tests_cmd])
#-----------------------------------------------------------------------
diff --git a/build_system/constants.py b/build_system/constants.py
index f32371307..cd9c08593 100644
--- a/build_system/constants.py
+++ b/build_system/constants.py
@@ -18,6 +18,7 @@
build_node_js = 'nodejs'
build_j2v8_cmake = 'j2v8cmake'
build_j2v8_jni = 'j2v8jni'
+build_j2v8_cpp = 'j2v8cpp'
build_j2v8_optimize = 'j2v8optimize'
build_j2v8_java = 'j2v8java'
build_j2v8_junit = 'j2v8junit'
diff --git a/build_system/docker_build.py b/build_system/docker_build.py
index accd8b187..a80ac21e8 100644
--- a/build_system/docker_build.py
+++ b/build_system/docker_build.py
@@ -1,12 +1,13 @@
-import atexit
import re
+import signal
import subprocess
import sys
from build_structures import BuildSystem, BuildStep
import constants as c
import build_utils as utils
+import docker_configs as dkr_cfg
class DockerBuildStep(BuildStep):
def __init__(self, platform, build_cwd = None, host_cwd = None):
@@ -21,22 +22,24 @@ def clean(self, config):
return
def health_check(self, config):
+ print "Verifying Docker build-system status..."
try:
# general docker availability check
- self.exec_host_cmd("docker stats --no-stream", config)
+ self.exec_host_cmd("docker --version", config)
+ # check the currently active container technology (linux vs. windows containers)
# NOTE: the additional newlines are important for the regex matching
version_str = utils.execute_to_str("docker version") + "\n\n"
server_match = re.search(r"Server:(.*)\n\n", version_str + "\n\n", re.DOTALL)
if (server_match is None or server_match.group(1) is None):
- sys.exit("ERROR: Unable to determine docker server version from version string: \n\n" + version_str)
+ utils.cli_exit("ERROR: Unable to determine docker server version from version string: \n\n" + version_str)
version_match = re.search(r"^ OS/Arch:\s+(.*)$", server_match.group(1), re.MULTILINE)
if (version_match is None):
- sys.exit("ERROR: Unable to determine docker server platform from version string: \n\n" + version_str)
+ utils.cli_exit("ERROR: Unable to determine docker server platform from version string: \n\n" + version_str)
docker_version = version_match.group(1)
@@ -44,10 +47,10 @@ def health_check(self, config):
# check if the docker engine is running the expected container platform (linux or windows)
if (docker_req_platform not in docker_version):
- sys.exit("ERROR: docker server must be using " + docker_req_platform + " containers, instead found server version using: " + docker_version)
+ utils.cli_exit("ERROR: docker server must be using " + docker_req_platform + " containers, instead found server version using: " + docker_version)
except subprocess.CalledProcessError:
- sys.exit("ERROR: Failed Docker build-system health check, make sure Docker is available and running!")
+ utils.cli_exit("ERROR: Failed Docker build-system health check, make sure Docker is available and running!")
def get_image_name(self, config):
return "j2v8-$VENDOR-$PLATFORM"
@@ -61,22 +64,41 @@ def pre_build(self, config):
container_name = self.get_container_name(config)
docker_stop_str = self.inject_env("docker stop " + container_name, config)
- def cli_exit_event():
- if (config.no_shutdown):
+    def cli_exit_event(sig, frame):
+ if config.no_shutdown:
+ print "INFO: Docker J2V8 container will continue running..."
return
print "Waiting for docker process to exit..."
self.exec_host_cmd(docker_stop_str, config)
- atexit.register(cli_exit_event)
+ signal.signal(signal.SIGINT, cli_exit_event)
args_str = ""
- if (config.sys_image):
- args_str += " --build-arg sys_image=" + config.sys_image
+ def build_arg(name, value):
+ return (" --build-arg " + name + "=" + value) if value else ""
- if (config.vendor):
- args_str += " --build-arg vendor=" + config.vendor
+ def sys_image_arg(value):
+ return build_arg("sys_image", value)
+
+ def vendor_arg(value):
+ return build_arg("vendor", value)
+
+ # use custom sys-image if it was specified by the user
+ args_str += sys_image_arg(config.sys_image)
+
+ # if we are building with docker
+ # and a specific vendor was specified for the build
+ # and no custom sys-image was specified ...
+ if (config.docker and config.vendor and not config.sys_image):
+ vendor_default_image = dkr_cfg.vendor_default_images.get(config.vendor)
+
+ # ... then use the default image for that vendor if available
+ args_str += sys_image_arg(vendor_default_image)
+
+ # pass a specified vendor string to the docker build
+ args_str += vendor_arg(config.vendor)
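+
+    # example: a Docker build for the "alpine" vendor without a custom sys-image
+    # results in "--build-arg sys_image=openjdk:8u131-alpine --build-arg vendor=alpine"
+    # (see docker_configs.vendor_default_images)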
image_name = self.get_image_name(config)
diff --git a/build_system/docker_configs.py b/build_system/docker_configs.py
new file mode 100644
index 000000000..d41e59481
--- /dev/null
+++ b/build_system/docker_configs.py
@@ -0,0 +1,6 @@
+import constants as c
+
+vendor_default_images = {
+ c.vendor_alpine: "openjdk:8u131-alpine",
+ c.vendor_debian: "debian:jessie",
+}
diff --git a/build_system/java_build_steps.py b/build_system/java_build_steps.py
new file mode 100644
index 000000000..c740438cc
--- /dev/null
+++ b/build_system/java_build_steps.py
@@ -0,0 +1,32 @@
+import shared_build_steps as u
+
+def add_java_step(platform_config, build_step, step_cmds):
+ # add the common preparation sequence for a maven build-step to the platform-config
+ if not hasattr(platform_config, "prepare_maven"):
+ platform_config.prepare_maven = lambda config: \
+ u.clearNativeLibs(config) + \
+ u.copyNativeLibs(config) + \
+ u.setJavaHome(config)
+ #-----------------------------------------------------------------------
+ # add a build-step that involves running maven and requires some preparation
+ def java_build_step(cmds):
+ def build_func(config):
+ # update maven pom.xml settings
+ u.apply_maven_config_settings(config)
+
+ # assemble the commands for this build-step
+ # includes the preparation commands for maven
+ steps = \
+ platform_config.prepare_maven(config) + \
+ cmds + \
+ u.copyOutput(config)
+
+        # the shell was already prepared for running maven;
+        # if another java step runs later on, this does not need to be done again
+ platform_config.prepare_maven = lambda cfg: ["echo Native lib already copied..."]
+
+ return steps
+ return build_func
+ #-----------------------------------------------------------------------
+ platform_config.build_step(build_step, java_build_step(step_cmds))
+ #-----------------------------------------------------------------------
diff --git a/build_system/run_tests.py b/build_system/run_tests.py
new file mode 100644
index 000000000..0ac3ebcb0
--- /dev/null
+++ b/build_system/run_tests.py
@@ -0,0 +1,18 @@
+from unittest import TestLoader, TestSuite
+from tests.runner.test_runner import SurePhyreTestRunner
+
+import tests.test_linux_docker
+import tests.test_macos_vagrant
+import tests.test_win32_docker
+import tests.test_win32_native
+
+loader = TestLoader()
+suite = TestSuite((
+ loader.loadTestsFromModule(tests.test_linux_docker),
+ # loader.loadTestsFromModule(tests.test_macos_vagrant),
+ # loader.loadTestsFromModule(tests.test_win32_docker),
+ # loader.loadTestsFromModule(tests.test_win32_native),
+))
+
+runner = SurePhyreTestRunner()
+runner.run(suite)
diff --git a/build_system/shared_build_steps.py b/build_system/shared_build_steps.py
index 2fcce619c..ac8f8b03e 100644
--- a/build_system/shared_build_steps.py
+++ b/build_system/shared_build_steps.py
@@ -1,5 +1,5 @@
"""
-A collection of commands, variables and functions that are very likely to be
+A collection of commands, constants and functions that are very likely to be
reused between target-platform configurations or build-steps on the same platform.
"""
import glob
@@ -7,24 +7,41 @@
import sys
import xml.etree.ElementTree as ET
+# see: https://stackoverflow.com/a/27333347/425532
+class XmlCommentParser(ET.XMLTreeBuilder):
+
+ def __init__(self):
+ ET.XMLTreeBuilder.__init__(self)
+ # assumes ElementTree 1.2.X
+ self._parser.CommentHandler = self.handle_comment
+
+ def handle_comment(self, data):
+ self._target.start(ET.Comment, {})
+ self._target.data(data)
+ self._target.end(ET.Comment)
+
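+# NOTE: the comment-aware parser above is used when rewriting the Maven pom.xml
+# (see apply_maven_config_settings below), so existing XML comments are preserved
+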
import constants as c
import build_settings as s
import build_utils as utils
# TODO: add CLI option to override / pass-in custom maven/gradle args
-build_cmd = "mvn clean verify -DskipTests -e"
-run_tests_cmd = "mvn test -e"# -Dtest=V8RuntimeNotLoadedTest"
+# NOTE: --batch-mode is needed to avoid unicode symbols messing up stdout while unit-testing the build-system
+java_build_cmd = "mvn clean verify --batch-mode -DskipTests -e"
+java_tests_cmd = "mvn test -e --batch-mode"
# the ./ should work fine on all platforms
# IMPORTANT: on MacOSX the ./ prefix is a strict requirement by some CLI commands !!!
cmake_out_dir = "./cmake.out/$VENDOR-$PLATFORM.$ARCH/"
+#-----------------------------------------------------------------------
+# Common shell commands & utils
+#-----------------------------------------------------------------------
def gradleCmd():
return "gradlew" if os.name == "nt" else "gradle"
def gradle(cmd):
return [
- gradleCmd() + " --daemon " + cmd,
+ gradleCmd() + " " + cmd,
]
def outputLibName(config):
@@ -43,23 +60,23 @@ def setEnvVar(name, value):
return ["export " + name + "=" + value]
def setJavaHome(config):
- # NOTE: when running docker alpine-linux builds, we don't want to overwrite JAVA_HOME
- if (config.vendor == c.vendor_alpine and config.cross_agent == "docker"):
- return []
-
- return setEnvVar("JAVA_HOME", "/opt/jdk/jdk1.8.0_131")
+ # NOTE: Docker Linux builds need some special handling, because not all images have
+ # a pre-defined JAVA_HOME environment variable
+ if (config.platform == c.target_linux and config.cross_agent == "docker"):
+ # currently only the Alpine image brings its own java-installation & JAVA_HOME
+ # for other Linux images we install the JDK and setup JAVA_HOME manually
+ if (config.vendor != c.vendor_alpine):
+ print "Setting JAVA_HOME env-var for Docker Linux build"
+ return setEnvVar("JAVA_HOME", "/opt/jdk/jdk1.8.0_131")
+
+ # for any other builds, we can just assume that JAVA_HOME is already set system-wide
+ print "Using system-var JAVA_HOME"
+ return []
def setVersionEnv(config):
return \
setEnvVar("J2V8_FULL_VERSION", s.J2V8_FULL_VERSION)
-def copyOutput(config):
- jar_name = outputJarName(config)
-
- return \
- mkdir("build.out") + \
- cp("target/" + jar_name + " build.out/")
-
def shell(cmd, args):
"""
Invokes the cross-platform polyfill for the shell command defined by the 'cmd' parameter
@@ -77,6 +94,36 @@ def mkdir(args):
def rm(args):
"""Invokes the cross-platform polyfill for the 'rm' shell command"""
return shell("rm", args)
+#-----------------------------------------------------------------------
+# Uniform build-steps (cross-platform)
+#-----------------------------------------------------------------------
+def build_j2v8_jni(config):
+ java_class_id = "com.eclipsesource.v8.V8"
+ java_class_parts = java_class_id.split(".")
+ java_class_filepath = "./target/classes/" + "/".join(java_class_parts) + ".class"
+
+ if (not os.path.exists(java_class_filepath)):
+ return [
+ "echo WARNING: Could not find " + java_class_parts[-1] + ".class file at path: " + java_class_filepath,
+ "echo JNI Header generation will be skipped...",
+ ]
+
+ return [
+ "echo Generating JNI header files...",
+ "cd ./target/classes",
+ "javah " + java_class_id,
+ ] + cp("com_eclipsesource_v8_V8.h ../../jni/com_eclipsesource_v8_V8Impl.h") + [
+ "echo Done",
+ ]
+#-----------------------------------------------------------------------
+# File generators, operations & utils
+#-----------------------------------------------------------------------
+def copyOutput(config):
+ jar_name = outputJarName(config)
+
+ return \
+ mkdir("build.out") + \
+ cp("target/" + jar_name + " build.out/")
def clearNativeLibs(config):
"""
@@ -112,7 +159,7 @@ def copyNativeLibs(config):
platform_lib_path = glob.glob(lib_pattern)
if (len(platform_lib_path) == 0):
- sys.exit("ERROR: Could not find native library for inclusion in platform target package")
+ utils.cli_exit("ERROR: Could not find native library for inclusion in platform target package")
platform_lib_path = platform_lib_path[0]
@@ -126,7 +173,7 @@ def copyNativeLibs(config):
else:
lib_target_path = "src/main/resources/"
- print "copying native lib from: " + platform_lib_path + " to: " + lib_target_path
+ print "Copying native lib from: " + platform_lib_path + " to: " + lib_target_path
copy_cmds += cp(platform_lib_path + " " + lib_target_path)
@@ -201,7 +248,7 @@ def __handle_setting(path, value):
print "Updating Maven configuration (" + target_pom_path + ")..."
- tree = ET.parse(src_pom_path)
+ tree = ET.parse(src_pom_path, XmlCommentParser())
root = tree.getroot()
__recurse_maven_settings(settings, __handle_setting)
diff --git a/build_system/shell_build.py b/build_system/shell_build.py
index eadeeefd7..483174172 100644
--- a/build_system/shell_build.py
+++ b/build_system/shell_build.py
@@ -13,7 +13,7 @@ def health_check(self, config):
shell_check_cmd = "ver" if utils.is_win32(config.platform) else "bash --version"
self.exec_cmd(shell_check_cmd, config)
except subprocess.CalledProcessError:
- sys.exit("ERROR: Failed Shell build-system health check!")
+ utils.cli_exit("ERROR: Failed Shell build-system health check!")
def pre_build(self, config):
return
diff --git a/build_system/tests/__init__.py b/build_system/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/build_system/tests/runner/__init__.py b/build_system/tests/runner/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/build_system/tests/runner/output_redirector.py b/build_system/tests/runner/output_redirector.py
new file mode 100644
index 000000000..808698421
--- /dev/null
+++ b/build_system/tests/runner/output_redirector.py
@@ -0,0 +1,17 @@
+
+class OutputRedirector(object):
+ """ Wrapper to redirect stdout, stderr or any other stream that it is given """
+ def __init__(self, streams):
+ self.streams = streams
+
+ def write(self, data):
+ for s in self.streams:
+ s.write(data)
+
+ def writelines(self, lines):
+ for s in self.streams:
+ s.writelines(lines)
+
+ def flush(self):
+ for s in self.streams:
+ s.flush()
diff --git a/build_system/tests/runner/test_asserts.py b/build_system/tests/runner/test_asserts.py
new file mode 100644
index 000000000..6b65fcfd5
--- /dev/null
+++ b/build_system/tests/runner/test_asserts.py
@@ -0,0 +1,7 @@
+
+def expectOutput(regex):
+ """ After a test is completed successfully, also verify that the CLI output contains an expected regex pattern. """
+ def expectOutput_wrapper(func):
+ func.__testRegex = regex
+ return func
+ return expectOutput_wrapper
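+
+# usage (hypothetical example):
+#
+#   @expectOutput(r"BUILD SUCCESS")
+#   def test_build(self):
+#       ...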
diff --git a/build_system/tests/runner/test_result.py b/build_system/tests/runner/test_result.py
new file mode 100644
index 000000000..25ce43d61
--- /dev/null
+++ b/build_system/tests/runner/test_result.py
@@ -0,0 +1,210 @@
+import collections
+import datetime
+import re
+import StringIO
+import sys
+import traceback
+import unittest
+
+from output_redirector import OutputRedirector
+import test_utils as utils
+
+TestResultBase = unittest.TestResult
+
+class TestOutcome:
+ Success, Failure, Error, Skip = range(4)
+
+TestRunData = collections.namedtuple("TestRunData", "outcome test errStr errObj output elapsed")
+TestRunData.__new__.__defaults__ = (None,) * len(TestRunData._fields)
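+# all fields default to None, so partially filled records can be created up-front
+# (see addSuccess/addError below) and completed later in complete_test_case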
+
+class TestResult(TestResultBase):
+ def __init__(self, streams, test_cases):
+ TestResultBase.__init__(self)
+ self.__sys_stdout = None
+ self.__sys_stderr = None
+
+ self.streams = streams
+ self.test_cases = test_cases
+
+ self.class_start_time = None
+ self.class_stop_time = None
+
+ self.test_start_time = None
+ self.test_stop_time = None
+
+ # list of all generated TestRunData for this result
+ self.all_results = []
+
+ # lists of type-specific TestRunData for this result
+ self.success_results = []
+ self.failure_results = []
+ self.error_results = []
+ self.skipped_results = []
+
+ #override
+ def startTest(self, test):
+ TestResultBase.startTest(self, test)
+
+ # remember the original sys streams
+ self.__sys_stdout = sys.stdout
+ self.__sys_stderr = sys.stderr
+
+ # just one buffer for both stdout and stderr
+ self.outputBuffer = StringIO.StringIO()
+
+ sys.stdout = OutputRedirector(self.streams + [self.outputBuffer])
+ sys.stderr = OutputRedirector(self.streams + [self.outputBuffer])
+
+ # now the real testing logic kicks in
+ test_class, test_method = utils.get_test_names(test)
+
+ if (not self.class_start_time):
+ self.class_start_time = datetime.datetime.now()
+
+ self.test_start_time = datetime.datetime.now()
+
+ utils.write_separator()
+ utils.write_log("INFO", "Running %(test_class)s.%(test_method)s" % locals())
+
+ def finish_test(self, test):
+ if (self.testsRun != len(self.test_cases)):
+ return
+
+ if (not self.class_stop_time):
+ self.class_stop_time = datetime.datetime.now()
+
+ num_tests = len(self.all_results)
+ num_failures = len(self.failure_results)
+ num_errors = len(self.error_results)
+ num_skips = len(self.skipped_results)
+
+ test_class, _ = utils.get_test_names(test)
+
+ test_elapsed = self.class_stop_time - self.class_start_time
+
+ log_level = "INFO"
+ failure_tag = ""
+
+ if (num_failures or num_errors):
+ log_level = "ERROR"
+ failure_tag = "<<< FAILURE! "
+ elif (num_skips):
+ log_level = "WARNING"
+
+ utils.write_separator()
+ print
+ utils.write_separator()
+ utils.write_log(log_level, "Tests run: %(num_tests)s, Failures: %(num_failures)s, Errors: %(num_errors)s, Skipped: %(num_skips)s, Time elapsed: %(test_elapsed)s s %(failure_tag)s- in %(test_class)s" % locals())
+ utils.write_separator()
+
+ def print_errors(test_class, err_list, kind):
+ for result in err_list:
+ test = result.test
+ elapsed = result.elapsed
+ test_method = test._testMethodName
+ utils.write_log("ERROR", "%(test_method)s(%(test_class)s) Time elapsed: %(elapsed)s s <<< %(kind)s!" % locals())
+ err_frame = result.errObj[2].tb_next
+ traceback.print_tb(err_frame, 1)
+ print
+
+ # write leading newline if detail error reports should be written
+ if any(self.error_results) or any(self.failure_results):
+ print
+
+ print_errors(test_class, self.error_results, "ERROR")
+ print_errors(test_class, self.failure_results, "FAILURE")
+
+ def complete_test_case(self, test, test_info = None):
+ """
+        Disconnect the output redirection and record the buffered test output.
+ Safe to call multiple times.
+ """
+ output = self.outputBuffer.getvalue()
+
+ if (test_info):
+ self.test_stop_time = datetime.datetime.now()
+ # merge data produced during test with additional meta-data
+ test_result = TestRunData(*(test_info[:-2] + (output, self.test_stop_time - self.test_start_time)))
+
+ self.all_results.append(test_result)
+
+ if (test_result.outcome == TestOutcome.Success):
+ self.success_results.append(test_result)
+
+ elif (test_result.outcome == TestOutcome.Error):
+ self.error_results.append(test_result)
+
+ elif (test_result.outcome == TestOutcome.Failure):
+ self.failure_results.append(test_result)
+
+ elif (test_result.outcome == TestOutcome.Skip):
+ self.skipped_results.append(test_result)
+
+ if self.__sys_stdout:
+ self.finish_test(test)
+
+ # turn off the shell output redirection
+ sys.stdout = self.__sys_stdout
+ sys.stderr = self.__sys_stderr
+
+ self.__sys_stdout = None
+ self.__sys_stderr = None
+
+ #override
+ def stopTest(self, test):
+ # Usually one of addSuccess, addError or addFailure would have been called.
+        # But there are some paths in unittest that would bypass this.
+ # We must disconnect stdout in stopTest(), which is guaranteed to be called.
+ self.complete_test_case(test)
+
+ def __assertTestOutput(self, test):
+ test_method = type(test).__dict__.get(test._testMethodName)
+ test_regex_field = "__testRegex"
+
+ if (hasattr(test_method, test_regex_field)):
+ regex = test_method.__dict__.get(test_regex_field)
+ output = self.outputBuffer.getvalue()
+
+ match_ok = re.search(regex, output)
+
+ if (not match_ok):
+ try:
+ raise Exception("Unable to find expected pattern in test-output:\n\t\t" + regex)
+ except Exception:
+ ex_nfo = sys.exc_info()
+ self.addFailure(test, ex_nfo)
+ return False
+
+ return True
+
+ #override
+ def addSuccess(self, test):
+
+ # after a test was successful, also run stdout/stderr asserts
+ # which can still result in a test-failure
+ if not self.__assertTestOutput(test):
+ return
+
+ TestResultBase.addSuccess(self, test)
+ testData = TestRunData(TestOutcome.Success, test, '', None)
+ self.complete_test_case(test, testData)
+
+ #override
+ def addError(self, test, err):
+ TestResultBase.addError(self, test, err)
+ _, _exc_str = self.errors[-1]
+ testData = TestRunData(TestOutcome.Error, test, _exc_str, err)
+ self.complete_test_case(test, testData)
+
+ #override
+ def addFailure(self, test, err):
+ TestResultBase.addFailure(self, test, err)
+ _, _exc_str = self.failures[-1]
+ testData = TestRunData(TestOutcome.Failure, test, _exc_str, err)
+ self.complete_test_case(test, testData)
+
+ #override
+ def addSkip(self, test, reason):
+ TestResultBase.addSkip(self, test, reason)
+ testData = TestRunData(TestOutcome.Skip, test, reason, None)
+ self.complete_test_case(test, testData)
diff --git a/build_system/tests/runner/test_runner.py b/build_system/tests/runner/test_runner.py
new file mode 100644
index 000000000..99db4991a
--- /dev/null
+++ b/build_system/tests/runner/test_runner.py
@@ -0,0 +1,113 @@
+import datetime
+import os
+import sys
+from unittest import TestSuite
+
+import __main__
+
+from test_result import TestResult, TestOutcome
+import test_utils as utils
+
+class SurePhyreTestRunner(object):
+ """
+ """
+ def __init__(self):
+ self.runner_start_time = None
+ self.runner_stop_time = None
+
+ def run(self, test):
+ "Run the given test case or test suite."
+
+ self.runner_start_time = datetime.datetime.now()
+
+ test_class_dict = {}
+
+ def find_test_methods(test_decl):
+ is_iterable = hasattr(test_decl, '__iter__')
+
+ if (is_iterable):
+ for tests in test_decl:
+ find_test_methods(tests)
+ else:
+ cls_nm = type(test_decl).__name__
+
+ if not test_class_dict.get(cls_nm):
+ test_class_dict[cls_nm] = list()
+
+ test_class_dict[cls_nm].append(test_decl)
+
+ # convert the given TestCase/TestSuite into a dictionary of test-classes
+ find_test_methods(test)
+
+ all_results = list()
+ success_results = list()
+ failure_results = list()
+ error_results = list()
+ skipped_results = list()
+
+ utils.write_separator()
+ utils.write_log("INFO", "T E S T S")
+
+ for k, class_tests in test_class_dict.iteritems():
+ class_suite = TestSuite(class_tests)
+ reports_dir = os.path.join(os.path.dirname(__main__.__file__), "test-reports")
+
+ if not os.path.exists(reports_dir):
+ os.makedirs(reports_dir)
+
+        with open(os.path.join(reports_dir, k + '.txt'), 'wb') as fp:
+ # execute all tests in this test class
+ class_result = TestResult([sys.stdout, fp], class_tests)
+ class_suite(class_result)
+
+ # get the test-results from this class and add them to the summary lists
+ all_results.extend(class_result.all_results)
+ success_results.extend(class_result.success_results)
+ failure_results.extend(class_result.failure_results)
+ error_results.extend(class_result.error_results)
+ skipped_results.extend(class_result.skipped_results)
+
+ tests_success = not any(error_results) and not any(failure_results)
+ tests_result = "SUCCESS" if tests_success else "FAILURE"
+ self.runner_stop_time = datetime.datetime.now()
+
+ # print final summary log after all tests are done running
+ print
+ utils.write_separator()
+ utils.write_log("INFO", "TESTS RUN %(tests_result)s" % locals())
+ utils.write_separator()
+ utils.write_log("INFO")
+ utils.write_log("INFO", "Results:")
+
+ if not tests_success:
+ utils.write_log("INFO")
+
+ def print_summary_problems(err_list, kind):
+ if (any(err_list)):
+ utils.write_log("ERROR", kind + "s: ")
+
+ for r in err_list:
+ test_class, test_method = utils.get_test_names(r.test)
+ err_message = r.errObj[1].message
+ err_frame = r.errObj[2].tb_next
+ err_lineno = err_frame.tb_lineno if err_frame else ""
+ utils.write_log("ERROR", " %(test_class)s.%(test_method)s:%(err_lineno)s %(err_message)s" % locals())
+
+ print_summary_problems(failure_results, "Failure")
+ print_summary_problems(error_results, "Error")
+
+ num_success = len(success_results)
+ num_failures = len(failure_results)
+ num_errors = len(error_results)
+ num_skips = len(skipped_results)
+
+ utils.write_log("INFO")
+ utils.write_log("ERROR", "Tests run: %(num_success)s, Failures: %(num_failures)s, Errors: %(num_errors)s, Skipped: %(num_skips)s" % locals())
+ utils.write_log("INFO")
+
+ total_elapsed = self.runner_stop_time - self.runner_start_time
+
+ utils.write_separator()
+ utils.write_log("INFO", "Total time: %(total_elapsed)s s" % locals())
+ utils.write_log("INFO", "Finished at: %s" % self.runner_stop_time)
+ utils.write_separator()
diff --git a/build_system/tests/runner/test_utils.py b/build_system/tests/runner/test_utils.py
new file mode 100644
index 000000000..27cbb9307
--- /dev/null
+++ b/build_system/tests/runner/test_utils.py
@@ -0,0 +1,11 @@
+
+def write_log(level, message = ""):
+ print "$ [%(level)s] %(message)s" % locals()
+
+def write_separator():
+ print "$---------------------------------------------------------------------------------------------------"
+
+def get_test_names(test):
+ test_class = type(test).__name__
+ test_method = test._testMethodName
+ return (test_class, test_method)
diff --git a/build_system/tests/test_linux_docker.py b/build_system/tests/test_linux_docker.py
new file mode 100644
index 000000000..81d9d1b61
--- /dev/null
+++ b/build_system/tests/test_linux_docker.py
@@ -0,0 +1,41 @@
+import unittest
+
+from runner.test_asserts import *
+
+import constants as c
+import build_executor as bex
+
+class TestLinuxDocker(unittest.TestCase):
+
+ def with_x64_defaults(self, params):
+ x64_defaults = {
+ "target": c.target_linux,
+ "arch": c.arch_x64,
+ "docker": True,
+ "redirect_stdout": True, # important for test-logging
+ }
+ params.update(x64_defaults)
+ return params
+
+ @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_disabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
+
+ # TODO: could use functional parameter overload to return error message + details
+ # (e.g. match regex groups for numfails, numerrors, numskips, etc. and make advanced asserts)
+ @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_enabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "node_enabled": True,
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
diff --git a/build_system/tests/test_macos_vagrant.py b/build_system/tests/test_macos_vagrant.py
new file mode 100644
index 000000000..029483e6d
--- /dev/null
+++ b/build_system/tests/test_macos_vagrant.py
@@ -0,0 +1,40 @@
+import unittest
+
+from runner.test_asserts import *
+
+import constants as c
+import build_executor as bex
+
+class TestMacOSVagrant(unittest.TestCase):
+
+ def with_x64_defaults(self, params):
+ x64_defaults = {
+ "target": c.target_macos,
+ "arch": c.arch_x64,
+ "vagrant": True,
+ "no_shutdown": True,
+ "redirect_stdout": True, # important for test-logging
+ }
+ params.update(x64_defaults)
+ return params
+
+ @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_disabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
+
+ @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_enabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "node_enabled": True,
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
diff --git a/build_system/tests/test_win32_docker.py b/build_system/tests/test_win32_docker.py
new file mode 100644
index 000000000..b6f517bcd
--- /dev/null
+++ b/build_system/tests/test_win32_docker.py
@@ -0,0 +1,39 @@
+import unittest
+
+from runner.test_asserts import *
+
+import constants as c
+import build_executor as bex
+
+class TestWin32Docker(unittest.TestCase):
+
+ def with_x64_defaults(self, params):
+ x64_defaults = {
+ "target": c.target_win32,
+ "arch": c.arch_x64,
+ "docker": True,
+ "redirect_stdout": True, # important for test-logging
+ }
+ params.update(x64_defaults)
+ return params
+
+ @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_disabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
+
+ @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_enabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "node_enabled": True,
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
diff --git a/build_system/tests/test_win32_native.py b/build_system/tests/test_win32_native.py
new file mode 100644
index 000000000..de341df53
--- /dev/null
+++ b/build_system/tests/test_win32_native.py
@@ -0,0 +1,38 @@
+import unittest
+
+from runner.test_asserts import *
+
+import constants as c
+import build_executor as bex
+
+class TestWin32Native(unittest.TestCase):
+
+ def with_x64_defaults(self, params):
+ x64_defaults = {
+ "target": c.target_win32,
+ "arch": c.arch_x64,
+ "redirect_stdout": True, # important for test-logging
+ }
+ params.update(x64_defaults)
+ return params
+
+ @expectOutput(r"\[WARNING\] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_disabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
+
+ @expectOutput(r"\[INFO\] Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: \d+.\d+ s - in com\.eclipsesource\.v8\.NodeJSTest")
+ def test_x64_node_enabled(self):
+
+ params = self.with_x64_defaults(
+ {
+ "node_enabled": True,
+ "buildsteps": ["j2v8", "test"],
+ })
+
+ bex.execute_build(params)
diff --git a/build_system/vagrant_build.py b/build_system/vagrant_build.py
index 25e9624f9..919be294b 100644
--- a/build_system/vagrant_build.py
+++ b/build_system/vagrant_build.py
@@ -1,4 +1,4 @@
-import atexit
+import signal
import subprocess
import sys
import build_utils as utils
@@ -15,10 +15,11 @@ def clean(self, config):
return
def health_check(self, config):
+ print "Verifying Vagrant build-system status..."
try:
- self.exec_host_cmd("vagrant global-status", config)
+ self.exec_host_cmd("vagrant --version", config)
except subprocess.CalledProcessError:
- sys.exit("ERROR: Failed Vagrant build-system health check, make sure Vagrant is available and running!")
+ utils.cli_exit("ERROR: Failed Vagrant build-system health check, make sure Vagrant is available and running!")
def pre_build(self, config):
vagrant_start_cmd = "vagrant up"
@@ -29,14 +30,15 @@ def pre_build(self, config):
if (config.pre_build_cmd):
vagrant_start_cmd = config.pre_build_cmd + utils.host_cmd_sep() + vagrant_start_cmd
- def cli_exit_event():
+ def cli_exit_event(signum, frame):
if (config.no_shutdown):
+ print "INFO: Vagrant J2V8 machine will continue running..."
return
print "Waiting for vagrant virtual-machine to exit..."
self.exec_host_cmd("vagrant halt", config)
- atexit.register(cli_exit_event)
+ signal.signal(signal.SIGINT, cli_exit_event)
self.exec_host_cmd(vagrant_start_cmd, config)
@@ -60,6 +62,7 @@ def exec_build(self, config):
def post_build(self, config):
if (config.no_shutdown):
+ print "INFO: Vagrant J2V8 machine will continue running..."
return
self.exec_host_cmd("vagrant halt", config)
diff --git a/cmake/BuildUtils.cmake b/cmake/BuildUtils.cmake
index b65180262..1b7d4ee3b 100644
--- a/cmake/BuildUtils.cmake
+++ b/cmake/BuildUtils.cmake
@@ -1,5 +1,6 @@
macro (link_static_crt)
+ message("Linking against static MSVCRT")
foreach(flag_var
CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
diff --git a/cmake/FindJava.cmake b/cmake/FindJava.cmake
index 4159b7228..00dd9d56b 100644
--- a/cmake/FindJava.cmake
+++ b/cmake/FindJava.cmake
@@ -93,8 +93,8 @@ else()
set(Java_ROOT "$ENV{JAVA_HOME}")
endif()
-if ("${Java_ROOT}" STREQUAL "")
+if("${Java_ROOT}" STREQUAL "")
message(FATAL_ERROR "Unable to locate Java JDK")
endif()
-message ("Using Java-Root: ${Java_ROOT}")
+message("Using Java-Root: ${Java_ROOT}")
diff --git a/cmake/NodeJsUtils.cmake b/cmake/NodeJsUtils.cmake
index 2f0876a63..5f6c3b9d3 100644
--- a/cmake/NodeJsUtils.cmake
+++ b/cmake/NodeJsUtils.cmake
@@ -1,7 +1,27 @@
-
-function (get_njs_libs nodejs_dir config_name)
+#-----------------------------------------------------------------------
+# Expects a list of absolute paths to the required Node.js static libraries
+# and exits CMake with a fatal error if any of them does not exist.
+#-----------------------------------------------------------------------
+function (assert_nodejs_libs_exist)
+ # ARGN: a list of absolute paths
+ set (njs_libs ${ARGN})
+
+ foreach(lib ${njs_libs})
+ if (NOT EXISTS ${lib})
+ message(FATAL_ERROR "ERROR: Unable to locate required Node.js library: ${lib}")
+ endif()
+ endforeach()
+
+endfunction(assert_nodejs_libs_exist)
+#-----------------------------------------------------------------------
+# Based on the operating system in use, locate the static Node.js libraries
+# that must be linked into the J2V8 JNI native bridge code.
+#-----------------------------------------------------------------------
+function (get_njs_libs nodejs_dir config_name fail_on_missing_libs)
#{
+ #-----------------------------------------------------------------------
if (CMAKE_SYSTEM_NAME STREQUAL "Windows")
+ #-----------------------------------------------------------------------
#{
# base directories for Node.js link libraries
set (njs_build ${nodejs_dir}/build/${config_name})
@@ -10,7 +30,7 @@ function (get_njs_libs nodejs_dir config_name)
set (njs_extra ${nodejs_dir}/${config_name})
set (njs_extra_lib ${nodejs_dir}/${config_name}/lib)
- # project link libraries
+ # Node.js link libraries
set (njs_libs
# nodejs/build/$Config/lib
${njs_build_lib}/standalone_inspector.lib
@@ -43,7 +63,15 @@ function (get_njs_libs nodejs_dir config_name)
# nodejs/$Config
${njs_extra}/cctest.lib
+ )
+
+ # verify that all required Node.js libs actually exist
+ if (${fail_on_missing_libs})
+ assert_nodejs_libs_exist(${njs_libs})
+ endif()
+ # additional link libraries
+ set (njs_libs ${njs_libs}
# additional windows libs, required by Node.js
Dbghelp
Shlwapi
@@ -51,12 +79,14 @@ function (get_njs_libs nodejs_dir config_name)
set (njs_${config_name}_libs ${njs_libs} PARENT_SCOPE)
#}
+ #-----------------------------------------------------------------------
elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
+ #-----------------------------------------------------------------------
#{
# base directories for Node.js link libraries
set (njs_out ${nodejs_dir}/out/${config_name})
- # project link libraries
+ # Node.js link libraries
set (njs_libs
# v8 libs
${njs_out}/libv8_base.a
@@ -70,24 +100,35 @@ function (get_njs_libs nodejs_dir config_name)
${njs_out}/libgtest.a
${njs_out}/libhttp_parser.a
${njs_out}/libuv.a
- -force_load ${njs_out}/libnode.a
${njs_out}/libopenssl.a
${njs_out}/libzlib.a
)
+ # verify that all required Node.js libs actually exist
+ if (${fail_on_missing_libs})
+ assert_nodejs_libs_exist(${njs_libs} ${njs_out}/libnode.a)
+ endif()
+
+ # additional link libraries
+ set (njs_libs ${njs_libs}
+ # Node.js libs that require special linker treatment
+ -force_load ${njs_out}/libnode.a
+ )
+
set (njs_${config_name}_libs ${njs_libs} PARENT_SCOPE)
#}
+ #-----------------------------------------------------------------------
elseif(CMAKE_SYSTEM_NAME STREQUAL "Android")
+ #-----------------------------------------------------------------------
#{
# base directories for Node.js link libraries
set (njs_out_target ${nodejs_dir}/out/${config_name}/obj.target)
set (njs_out_v8 ${nodejs_dir}/out/${config_name}/obj.target/deps/v8/src)
set (njs_out_deps ${nodejs_dir}/out/${config_name}/obj.target/deps)
- # project link libraries
+ # Node.js link libraries
set (njs_libs
# node libs
- -Wl,--start-group
${njs_out_deps}/uv/libuv.a
${njs_out_deps}/openssl/libopenssl.a
${njs_out_deps}/http_parser/libhttp_parser.a
@@ -101,25 +142,40 @@ function (get_njs_libs nodejs_dir config_name)
${njs_out_v8}/libv8_libplatform.a
${njs_out_v8}/libv8_libbase.a
${njs_out_v8}/libv8_libsampler.a
+ )
- -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive
+ # verify that all required Node.js libs actually exist
+ if (${fail_on_missing_libs})
+ assert_nodejs_libs_exist(${njs_libs} ${njs_out_target}/libnode.a)
+ endif()
+ # finalize linker settings
+ set (njs_libs
+ #
+ -Wl,--start-group
+ # libs that need no special linker treatment
+ ${njs_libs}
+
+ # Node.js libs that require special linker treatment
+ -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive
-Wl,--end-group
+ #
)
set (njs_${config_name}_libs ${njs_libs} PARENT_SCOPE)
#}
+ #-----------------------------------------------------------------------
elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux")
+ #-----------------------------------------------------------------------
#{
# base directories for Node.js link libraries
set (njs_out_target ${nodejs_dir}/out/${config_name}/obj.target)
set (njs_out_v8 ${nodejs_dir}/out/${config_name}/obj.target/deps/v8/src)
set (njs_out_deps ${nodejs_dir}/out/${config_name}/obj.target/deps)
- # project link libraries
+ # Node.js link libraries
set (njs_libs
# node libs
- -Wl,--start-group
${njs_out_deps}/uv/libuv.a
${njs_out_deps}/openssl/libopenssl.a
${njs_out_deps}/http_parser/libhttp_parser.a
@@ -133,10 +189,24 @@ function (get_njs_libs nodejs_dir config_name)
${njs_out_v8}/libv8_libplatform.a
${njs_out_v8}/libv8_libbase.a
${njs_out_v8}/libv8_libsampler.a
+ )
+
+ # verify that all required Node.js libs actually exist
+ if (${fail_on_missing_libs})
+ assert_nodejs_libs_exist(${njs_libs} ${njs_out_target}/libnode.a)
+ endif()
- -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive
+ # finalize linker settings
+ set (njs_libs
+ #
+ -Wl,--start-group
+ # libs that need no special linker treatment
+ ${njs_libs}
+ # Node.js libs that require special linker treatment
+ -Wl,--whole-archive ${njs_out_target}/libnode.a -Wl,--no-whole-archive
-Wl,--end-group
+ #
)
set (njs_${config_name}_libs ${njs_libs} PARENT_SCOPE)
@@ -144,3 +214,4 @@ function (get_njs_libs nodejs_dir config_name)
endif()
#}
endfunction (get_njs_libs)
+#-----------------------------------------------------------------------
diff --git a/docker/android/Dockerfile b/docker/android/Dockerfile
index 7b9f7aeb4..415e93763 100644
--- a/docker/android/Dockerfile
+++ b/docker/android/Dockerfile
@@ -14,6 +14,9 @@ WORKDIR /temp/docker/shared/
COPY ./shared/install.debian.packages.sh /temp/docker/shared
RUN ./install.debian.packages.sh
+# install the heaviest dependencies first
+# (this keeps the large layers cached in docker, even if one of the shell scripts
+# of the lighter dependencies below needs to be modified)
ENV NDK_VERSION "r13b"
ENV NDK_NAME "android-ndk-$NDK_VERSION-linux-x86_64"
RUN echo "Preparing Android NDK..." && \
@@ -30,19 +33,6 @@ RUN echo "Preparing Android GCC-Toolchain..." && \
ENV NDK "/build/android-ndk-$NDK_VERSION"
ENV PATH "$PATH:/build/android-gcc-toolchain:$NDK"
-COPY ./shared/install.jdk.sh /temp/docker/shared
-RUN ./install.jdk.sh
-ENV JAVA_HOME "/opt/jdk/jdk1.8.0_131"
-
-COPY ./shared/install.cmake.sh /temp/docker/shared
-RUN ./install.cmake.sh
-ENV PATH "$PATH:/opt/cmake/bin"
-
-COPY ./shared/install.gradle.sh /temp/docker/shared
-RUN ./install.gradle.sh
-ENV GRADLE_HOME "/opt/gradle-3.5"
-ENV PATH "$PATH:$GRADLE_HOME/bin"
-
RUN echo "Preparing Android SDK..." && \
wget -qO- http://dl.google.com/android/android-sdk_r23-linux.tgz | \
tar xvz -C /usr/local/ && \
@@ -54,18 +44,33 @@ ENV ANDROID_HOME "/usr/local/android-sdk"
ENV PATH "$PATH:$ANDROID_HOME/tools"
ENV PATH "$PATH:$ANDROID_HOME/platform-tools"
+# Create fake keymap file
+RUN mkdir /usr/local/android-sdk/tools/keymaps && \
+ touch /usr/local/android-sdk/tools/keymaps/en-us
+
+# install the required license for sdk-build-tools
+RUN mkdir -p $ANDROID_HOME/licenses && echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55\n" > $ANDROID_HOME/licenses/android-sdk-license
+
+# java must be installed at this point, because the following android CLI commands depend on it
+COPY ./shared/install.jdk.sh /temp/docker/shared
+RUN ./install.jdk.sh
+ENV JAVA_HOME "/opt/jdk/jdk1.8.0_131"
+
# set up the android emulator & android images for running the unit tests
# see: https://github.com/tracer0tong/android-emulator
RUN ( sleep 4 && while [ 1 ]; do sleep 1; echo y; done ) | android update sdk --no-ui --force -a --filter \
build-tools-24.0.3,tools,platform-tools,android-19,extra-android-m2repository,sys-img-x86-android-19,sys-img-armeabi-v7a-android-19 && \
echo "y" | android update adb
-# Create fake keymap file
-RUN mkdir /usr/local/android-sdk/tools/keymaps && \
- touch /usr/local/android-sdk/tools/keymaps/en-us
+# now install the rest of the tools that are more lightweight
+COPY ./shared/install.cmake.sh /temp/docker/shared
+RUN ./install.cmake.sh
+ENV PATH "$PATH:/opt/cmake/bin"
-# install the required license for sdk-build-tools
-RUN mkdir -p $ANDROID_HOME/licenses && echo -e "\n8933bad161af4178b1185d1a37fbf41ea5269c55\n" > $ANDROID_HOME/licenses/android-sdk-license
+COPY ./shared/install.gradle.sh /temp/docker/shared
+RUN ./install.gradle.sh
+ENV GRADLE_HOME "/opt/gradle-3.5"
+ENV PATH "$PATH:$GRADLE_HOME/bin"
# download the most critical gradle dependencies for the build beforehand
RUN mkdir -p /temp
diff --git a/docker/android/kill_supervisor.py b/docker/android/kill_supervisor.py
old mode 100644
new mode 100755
diff --git a/docker/android/start-emulator.template.sh b/docker/android/start-emulator.template.sh
old mode 100644
new mode 100755
diff --git a/docker/android/supervisord.template.conf b/docker/android/supervisord.template.conf
index 7fafa057e..f2a4e6a42 100644
--- a/docker/android/supervisord.template.conf
+++ b/docker/android/supervisord.template.conf
@@ -16,7 +16,7 @@ stopasgroup=true
killasgroup=true
[eventlistener:emulator_exit]
-command=/j2v8/docker/android/kill_supervisor.py
+command=python /j2v8/docker/android/kill_supervisor.py
process_name=emulator
events=PROCESS_STATE_EXITED,PROCESS_STATE_FATAL
stdout_logfile=/dev/stdout
@@ -38,7 +38,7 @@ stopasgroup=true
killasgroup=true
[eventlistener:tests_exit]
-command=/j2v8/docker/android/kill_supervisor.py
+command=python /j2v8/docker/android/kill_supervisor.py
process_name=tests
events=PROCESS_STATE_EXITED,PROCESS_STATE_FATAL
stdout_logfile=/dev/stdout
@@ -60,7 +60,7 @@ stopasgroup=true
killasgroup=true
[eventlistener:logcat_exit]
-command=/j2v8/docker/android/kill_supervisor.py
+command=python /j2v8/docker/android/kill_supervisor.py
process_name=tests
events=PROCESS_STATE_EXITED,PROCESS_STATE_FATAL
stdout_logfile=/dev/stdout
diff --git a/docker/android/wait-for-emulator.sh b/docker/android/wait-for-emulator.sh
old mode 100644
new mode 100755
diff --git a/docker/shared/install.alpine.packages.sh b/docker/shared/install.alpine.packages.sh
old mode 100644
new mode 100755
diff --git a/docker/shared/install.jdk.sh b/docker/shared/install.jdk.sh
index b7d7a0f75..9942fa013 100755
--- a/docker/shared/install.jdk.sh
+++ b/docker/shared/install.jdk.sh
@@ -12,5 +12,7 @@ echo "Preparing JDK..."
curl -L -C - -b "oraclelicense=accept-securebackup-cookie" -O http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.tar.gz
mkdir -p /opt/jdk
tar x -C /opt/jdk -f jdk-8u131-linux-x64.tar.gz
+
update-alternatives --install /usr/bin/java java /opt/jdk/jdk1.8.0_131/bin/java 100
update-alternatives --install /usr/bin/javac javac /opt/jdk/jdk1.8.0_131/bin/javac 100
+update-alternatives --install /usr/bin/javah javah /opt/jdk/jdk1.8.0_131/bin/javah 100
diff --git a/docker/win32/install.jdk.ps1 b/docker/win32/install.jdk.ps1
index f5c94d9f9..1d369da7e 100644
--- a/docker/win32/install.jdk.ps1
+++ b/docker/win32/install.jdk.ps1
@@ -12,6 +12,10 @@ Start-Process C:/jdk.exe -Wait `
$env:JAVA_HOME = 'C:\Program Files\Java\jdk1.8.0_131';
[Environment]::SetEnvironmentVariable('JAVA_HOME', $env:JAVA_HOME, [EnvironmentVariableTarget]::Machine);
+# add Java tools to path
+$env:PATH = $env:JAVA_HOME+'\bin;'+$env:PATH;
+[Environment]::SetEnvironmentVariable('PATH', $env:PATH, [EnvironmentVariableTarget]::Machine);
+
Write-Host 'Removing ...';
Remove-Item C:\jdk.exe -Force;
diff --git a/gradle.properties b/gradle.properties
new file mode 100644
index 000000000..85eb5dfb5
--- /dev/null
+++ b/gradle.properties
@@ -0,0 +1,3 @@
+# increase the JVM heap space available for gradle
+# (allows dex to run in the same process as gradle)
+org.gradle.jvmargs=-Xmx4608M
diff --git a/j2v8-cli.cmd b/j2v8-cli.cmd
new file mode 100644
index 000000000..0652878a4
--- /dev/null
+++ b/j2v8-cli.cmd
@@ -0,0 +1,5 @@
+@echo off
+
+doskey build=python build.py $*
+doskey nodejs=python nodejs.py $*
+doskey citests=python build_system\run_tests.py $*
diff --git a/j2v8-cli.sh b/j2v8-cli.sh
new file mode 100755
index 000000000..3c84bae79
--- /dev/null
+++ b/j2v8-cli.sh
@@ -0,0 +1,3 @@
+alias build="python build.py"
+alias nodejs="python nodejs.py"
+alias citests="python build_system/run_tests.py"
diff --git a/jni/com_eclipsesource_v8_V8Impl.cpp b/jni/com_eclipsesource_v8_V8Impl.cpp
index 4dfc7f5eb..f4a0e6c5b 100644
--- a/jni/com_eclipsesource_v8_V8Impl.cpp
+++ b/jni/com_eclipsesource_v8_V8Impl.cpp
@@ -439,6 +439,15 @@ JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isRunning
#endif
}
+JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isNodeCompatible
+ (JNIEnv *, jclass) {
+ #ifdef NODE_COMPATIBLE
+ return true;
+ #else
+ return false;
+ #endif
+}
+
JNIEXPORT jlong JNICALL Java_com_eclipsesource_v8_V8__1createIsolate
(JNIEnv *env, jobject v8, jstring globalAlias) {
V8Runtime* runtime = new V8Runtime();
diff --git a/jni/com_eclipsesource_v8_V8Impl.h b/jni/com_eclipsesource_v8_V8Impl.h
index 2e9fe4676..6739431b8 100644
--- a/jni/com_eclipsesource_v8_V8Impl.h
+++ b/jni/com_eclipsesource_v8_V8Impl.h
@@ -819,6 +819,14 @@ JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1pumpMessageLoop
JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isRunning
(JNIEnv *, jclass, jlong);
+/*
+ * Class: com_eclipsesource_v8_V8
+ * Method: _isNodeCompatible
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_com_eclipsesource_v8_V8__1isNodeCompatible
+ (JNIEnv *, jclass);
+
#ifdef __cplusplus
}
#endif
diff --git a/node.patches/7.9.0.diff b/node.patches/7.9.0.diff
new file mode 100644
index 000000000..82aeda68e
--- /dev/null
+++ b/node.patches/7.9.0.diff
@@ -0,0 +1,67 @@
+diff --git a/common.gypi b/common.gypi
+index 147cc70f..40e44baf 100644
+--- a/common.gypi
++++ b/common.gypi
+@@ -190,7 +190,7 @@
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true', # pool string literals
+- 'DebugInformationFormat': 3, # Generate a PDB
++ 'DebugInformationFormat': 0, # Don't generate a PDB
+ 'WarningLevel': 3,
+ 'BufferSecurityCheck': 'true',
+ 'ExceptionHandling': 0, # /EHsc
+diff --git a/deps/cares/common.gypi b/deps/cares/common.gypi
+index 609ad62a..d714cdd7 100644
+--- a/deps/cares/common.gypi
++++ b/deps/cares/common.gypi
+@@ -80,7 +80,7 @@
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true', # pool string literals
+- 'DebugInformationFormat': 3, # Generate a PDB
++ 'DebugInformationFormat': 0, # Don't generate a PDB
+ 'WarningLevel': 3,
+ 'BufferSecurityCheck': 'true',
+ 'ExceptionHandling': 1, # /EHsc
+diff --git a/deps/uv/common.gypi b/deps/uv/common.gypi
+index 470b7338..8dc3b3f9 100644
+--- a/deps/uv/common.gypi
++++ b/deps/uv/common.gypi
+@@ -87,7 +87,7 @@
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true', # pool string literals
+- 'DebugInformationFormat': 3, # Generate a PDB
++ 'DebugInformationFormat': 0, # Don't generate a PDB
+ 'WarningLevel': 3,
+ 'BufferSecurityCheck': 'true',
+ 'ExceptionHandling': 1, # /EHsc
+diff --git a/src/node.h b/src/node.h
+index 1255a4af..66911873 100644
+--- a/src/node.h
++++ b/src/node.h
+@@ -417,7 +417,7 @@ extern "C" NODE_EXTERN void node_module_register(void* mod);
+ #ifdef NODE_SHARED_MODE
+ # define NODE_CTOR_PREFIX
+ #else
+-# define NODE_CTOR_PREFIX static
++# define NODE_CTOR_PREFIX
+ #endif
+
+ #if defined(_MSC_VER)
+diff --git a/vcbuild.bat b/vcbuild.bat
+index 01750a4a..f8392e4d 100644
+--- a/vcbuild.bat
++++ b/vcbuild.bat
+@@ -176,8 +176,8 @@ goto run
+ if defined noprojgen goto msbuild
+
+ @rem Generate the VS project.
+-echo configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG%
+-python configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG%
++echo configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG% --enable-static
++python configure %configure_flags% --dest-cpu=%target_arch% --tag=%TAG% --enable-static
+ if errorlevel 1 goto create-msvs-files-failed
+ if not exist node.sln goto create-msvs-files-failed
+ echo Project files generated.
diff --git a/node.patches/8.10.1.diff b/node.patches/8.10.1.diff
new file mode 100644
index 000000000..37f6af6fe
--- /dev/null
+++ b/node.patches/8.10.1.diff
@@ -0,0 +1,52 @@
+diff --git a/common.gypi b/common.gypi
+index ea08e803..fa94c9f4 100644
+--- a/common.gypi
++++ b/common.gypi
+@@ -189,7 +189,7 @@
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true', # pool string literals
+- 'DebugInformationFormat': 3, # Generate a PDB
++ 'DebugInformationFormat': 0, # Don't generate a PDB
+ 'WarningLevel': 3,
+ 'BufferSecurityCheck': 'true',
+ 'ExceptionHandling': 0, # /EHsc
+diff --git a/deps/cares/common.gypi b/deps/cares/common.gypi
+index 609ad62a..d714cdd7 100644
+--- a/deps/cares/common.gypi
++++ b/deps/cares/common.gypi
+@@ -80,7 +80,7 @@
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true', # pool string literals
+- 'DebugInformationFormat': 3, # Generate a PDB
++ 'DebugInformationFormat': 0, # Don't generate a PDB
+ 'WarningLevel': 3,
+ 'BufferSecurityCheck': 'true',
+ 'ExceptionHandling': 1, # /EHsc
+diff --git a/deps/uv/common.gypi b/deps/uv/common.gypi
+index ec482340..807de0aa 100644
+--- a/deps/uv/common.gypi
++++ b/deps/uv/common.gypi
+@@ -93,7 +93,7 @@
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'StringPooling': 'true', # pool string literals
+- 'DebugInformationFormat': 3, # Generate a PDB
++ 'DebugInformationFormat': 0, # Don't generate a PDB
+ 'WarningLevel': 3,
+ 'BufferSecurityCheck': 'true',
+ 'ExceptionHandling': 1, # /EHsc
+diff --git a/src/node.h b/src/node.h
+index 596769a6..21dbc38d 100644
+--- a/src/node.h
++++ b/src/node.h
+@@ -433,7 +433,7 @@ extern "C" NODE_EXTERN void node_module_register(void* mod);
+ #ifdef NODE_SHARED_MODE
+ # define NODE_CTOR_PREFIX
+ #else
+-# define NODE_CTOR_PREFIX static
++# define NODE_CTOR_PREFIX
+ #endif
+
+ #if defined(_MSC_VER)
diff --git a/nodejs.py b/nodejs.py
index 81374b00e..bd6111ea2 100644
--- a/nodejs.py
+++ b/nodejs.py
@@ -1,11 +1,12 @@
"""
-Utility-belt script to manage the Node.js dependency
+Utility-belt script to manage the Node.js/V8 dependency
"""
import argparse
import collections
import fnmatch
import glob
import io
+from itertools import ifilter
import os
import sys
import tarfile
@@ -16,6 +17,8 @@
import build_system.build_utils as utils
import build_system.build_settings as settings
+CMD_LINEBREAK = "\n\n"
+
# helper classes to show zipping progress
# original idea: https://stackoverflow.com/a/3668977/425532
class ReadProgressFileObject(io.FileIO):
@@ -39,14 +42,11 @@ def write(self, b):
sys.stdout.flush()
return io.FileIO.write(self, b)
-Command = collections.namedtuple("Command", "aliases function")
+Command = collections.namedtuple("Command", "name function help")
DepsDirectory = collections.namedtuple("DepsDirectory", "path include")
-# Command-Line setup
-parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
-
#-----------------------------------------------------------------------
-def flush_cache(silent = False):
+def flush_cache(args = None, silent = False):
if not silent:
print "[flush-cache]"
@@ -56,42 +56,66 @@ def flush_cache(silent = False):
print "Done"
cmd_flush_cache = Command(
- aliases=["flush-cache", "fc"],
+ name="flush-cache",
function=flush_cache,
+ help="Move any Node.js/V8 native build-artifacts (.o/.a/.lib) from the './node' directory into the 'node.out' cache subdirectory\n" + \
+ " of the respective vendor/platform/architecture."
)
#-----------------------------------------------------------------------
-def git_init():
- print "[git-init]"
+def git_clone(args):
+ print "[git-clone]"
# TODO: add CLI overide options
# - Node version
# - J2V8 version
- utils.store_nodejs_output(None, ".")
+ flush_cache(silent=True)
if (not os.path.exists("node")):
print "Cloning Node.js version: " + settings.NODE_VERSION
# NOTE: autocrlf=false is very important for linux based cross-compiles of Node.js to work on a windows docker host
utils.execute("git clone https://github.com/nodejs/node --config core.autocrlf=false --depth 1 --branch v" + settings.NODE_VERSION)
else:
- print "Node.js is already cloned & checked out"
- apply_diff(True)
+ print "Skipped git-clone: Node.js source-code is already cloned & checked out at the './node' directory."
+
+ print "Done"
+
+cmd_git_clone = Command(
+ name="git-clone",
+ function=git_clone,
+ help=" Clone the C++ source-code from the official Node.js GitHub repository." + \
+ "\n (the Node.js version branch from build_settings.py will be checked out automatically)"
+)
+#-----------------------------------------------------------------------
+def git_checkout(args):
+ print "[git-checkout]"
+
+ flush_cache(silent=True)
+
+ if (os.path.exists("node")):
+ print "Checkout Node.js version: " + settings.NODE_VERSION
+
+ # TODO: is there a way to fetch/checkout only a single remote tag
+ utils.execute("git fetch -v --progress --tags --depth 1 origin", "node")
+ utils.execute("git checkout --progress tags/v" + settings.NODE_VERSION + " -b v" + settings.NODE_VERSION, "node")
+ else:
+ print "ERROR: Node.js source-code was not yet cloned into the './node' directory, run 'python nodejs.py git-clone' first."
print "Done"
-cmd_git_init = Command(
- aliases=["git-init", "gi"],
- function=git_init
+cmd_git_checkout = Command(
+ name="git-checkout",
+ function=git_checkout,
+ help="Checkout the correct git branch for the Node.js version specified in build_settings.py"
)
#-----------------------------------------------------------------------
-def package():
+def package(platforms = None):
print "[package]"
- platforms = sys.argv[2:]
- full = len(platforms) == 0
+ full = platforms is None or len(platforms) == 0
# make sure all node.js binaries are stored in the cache before packaging
- flush_cache(True)
+ flush_cache(silent=True)
# C++ header files
# NOTE: see https://stackoverflow.com/a/4851555/425532 why this weird syntax is necessary here
@@ -116,7 +140,7 @@ def __add_platform_deps(platform, include, vendor = None):
) for arch in target.architectures
]
- # speciffy the platforms & file patterns that should be included
+ # specify the platforms & file patterns that should be included
__add_platform_deps(c.target_android, [".o", ".a"])
__add_platform_deps(c.target_linux, [".o", ".a"])
__add_platform_deps(c.target_linux, [".o", ".a"], vendor = c.vendor_alpine)
@@ -169,11 +193,58 @@ def __add_platform_deps(platform, include, vendor = None):
print "generated: " + package_filename
cmd_package = Command(
- aliases=["package", "pkg"],
- function=package
+ name="package",
+ function=package,
+ help="Create a .tar.bz2 dependency package with all the currently built Node.js/V8 binaries from the './node.out' cache directories."
)
#-----------------------------------------------------------------------
-def store_diff():
+def touch(platforms = None):
+ full = platforms is None or len(platforms) == 0
+
+ # make sure all node.js binaries are stored in the cache before resetting file-times
+ flush_cache(silent=True)
+
+ dependencies = {
+ "list": [],
+ }
+
+ # TODO: extract shared code between this and "package" command
+ def __add_platform_deps(platform, include, vendor = None):
+ target = bc.platform_configs.get(platform)
+ vendor_str = (vendor + "-" if vendor else "")
+ selected = (not full) and (vendor_str + platform) in platforms # guard: platforms may be None
+
+ if (full or selected):
+ dependencies["list"] += [
+ DepsDirectory(
+ path="./node.out/" + vendor_str + platform + "." + arch + "/",
+ include=["j2v8.node.out"] + include
+ ) for arch in target.architectures
+ ]
+
+ # specify the platforms & file patterns that should be included
+ __add_platform_deps(c.target_android, [".o", ".a"])
+ __add_platform_deps(c.target_linux, [".o", ".a"])
+ __add_platform_deps(c.target_linux, [".o", ".a"], vendor = c.vendor_alpine)
+ __add_platform_deps(c.target_macos, [".a"])
+ __add_platform_deps(c.target_win32, [".lib"])
+
+ # set modification-time of all found binary files
+ for dep in dependencies["list"]:
+ print "set current file-time " + dep.path
+ for root, dirs, filenames in os.walk(dep.path):
+ for pattern in dep.include:
+ for file_name in fnmatch.filter(filenames, '*' + pattern):
+ file_path = os.path.join(root, file_name)
+ utils.touch(file_path)
+
+cmd_touch = Command(
+ name="touch",
+ function=touch,
+ help="Set modification-time of all currently built Node.js/V8 binaries in the './node.out' cache directories."
+)
+#-----------------------------------------------------------------------
+def store_diff(args):
print "[store-diff]"
patch_file = os.path.join("..", "node.patches", settings.NODE_VERSION + ".diff")
@@ -183,11 +254,14 @@ def store_diff():
print "Done"
cmd_store_diff = Command(
- aliases=["store-diff", "sd"],
- function=store_diff
+ name="store-diff",
+ function=store_diff,
+ help="Create a patch-file in the './node.patches' directory with the current local modifications\n" +
+ " to the Node.js/V8 source-code.\n" +
+ " (the Node.js version from build_settings.py will be included in the patch filename)."
)
#-----------------------------------------------------------------------
-def apply_diff(silent = False):
+def apply_diff(args, silent = False):
if not silent:
print "[apply-diff]"
@@ -203,32 +277,56 @@ def apply_diff(silent = False):
print "Done"
cmd_apply_diff = Command(
- aliases=["apply-diff", "ad"],
- function=apply_diff
+ name="apply-diff",
+ function=apply_diff,
+ help=" Apply a previously created patch-file to the currently checked out Node.js/V8 source-code."
)
#-----------------------------------------------------------------------
-all_cmds = [
- cmd_flush_cache,
- cmd_git_init,
- cmd_package,
- cmd_store_diff,
- cmd_apply_diff,
-]
-
-parser.add_argument("cmd",
- metavar="command",
- nargs=1,
- type=str,
- choices=[cmd for commands in all_cmds for cmd in commands.aliases])
-
-parser.add_argument("rest",
- nargs="*",
- help=argparse.SUPPRESS)
-
-args = parser.parse_args()
-
-for cmd_tuple in all_cmds:
- if (args.cmd[0] in cmd_tuple.aliases):
- cmd_tuple.function()
- break
+#-----------------------------------------------------------------------
+# Command-Line setup
+#-----------------------------------------------------------------------
+commands = {
+ "git": {
+ "__help": " Download and manage the Node.js/V8 source code for building J2V8 from source.",
+ "clone": cmd_git_clone,
+ "checkout": cmd_git_checkout,
+ },
+ "bin": {
+ "__help": " Manage the binary build-artifacts that are produced by Node.js/V8 builds.",
+ "flush": cmd_flush_cache,
+ "package": cmd_package,
+ "touch": cmd_touch,
+ },
+ "diff": {
+ "__help": "Create and apply Git patch-files for Node.js that are required for interoperability with J2V8.",
+ "create": cmd_store_diff,
+ "apply": cmd_apply_diff,
+ },
+}
+#-----------------------------------------------------------------------
+def parse_sub_command(args, choices, help_formatter, extra_args = None):
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
+ help_str = [c + " " + help_formatter(c) for c in choices]
+ parser.add_argument("command", help="\n\n".join(help_str) + "\n\n", choices=choices)
+
+ if (extra_args):
+ extra_args(parser)
+
+ args = parser.parse_args(args)
+ return args
+#-----------------------------------------------------------------------
+
+# parse first level command
+args = parse_sub_command(sys.argv[1:2], commands, lambda c: commands[c].get("__help"))
+lvl1_cmd = commands.get(args.command)
+
+# parse second level command
+sub_choices = filter(lambda x: x != "__help", lvl1_cmd)
+args = parse_sub_command(sys.argv[2:], sub_choices, lambda c: lvl1_cmd[c].help, \
+ lambda parser: parser.add_argument("args", nargs="*"))
+lvl2_cmd = args.command
+
+# get the final command handler and delegate all further parameters to it
+cmd_handler = lvl1_cmd.get(lvl2_cmd)
+cmd_handler.function(sys.argv[3:])
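
To summarize the reworked CLI above: the first positional argument selects a command group, the second selects the command within that group, and any remaining arguments are passed straight to the handler (e.g. `python nodejs.py git clone`). The following self-contained miniature illustrates just the dispatch mechanism — the handler and help text are stand-ins, not the real ones, and all argparse-based validation is omitted:

```
import collections
import sys

Command = collections.namedtuple("Command", "name function help")

def git_clone(args):
    print "[git-clone] extra args: " + str(args)

# two-level command table, mirroring the structure used in nodejs.py
commands = {
    "git": {
        "__help": "Download and manage the Node.js/V8 source code.",
        "clone": Command("git-clone", git_clone, "Clone the Node.js repository."),
    },
}

# e.g. invoked as: python mini_cli.py git clone
lvl1_cmd = commands.get(sys.argv[1])        # select the command group
cmd_handler = lvl1_cmd.get(sys.argv[2])     # select the command within it
cmd_handler.function(sys.argv[3:])          # delegate the remaining arguments
```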
diff --git a/pom.xml b/pom.xml
index c409b634b..02a524200 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,18 +1,31 @@
     <modelVersion>4.0.0</modelVersion>
-    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <os>alpine-linux</os>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+
+    <os>linux</os>
     <ws>gtk</ws>
-    <arch>x86_64</arch>
+    <arch>x86_64</arch>
     <groupId>com.eclipsesource.j2v8</groupId>
-    <artifactId>j2v8_alpine-linux_x86_64</artifactId>
+    <artifactId>j2v8_linux_x86_64</artifactId>
     <version>4.8.0-SNAPSHOT</version>
     <packaging>bundle</packaging>
-    <name>j2v8_alpine-linux_x86_64</name>
+    <name>j2v8_linux_x86_64</name>
     <description>J2V8 is a set of Java bindings for V8</description>
     <url>https://github.com/eclipsesource/j2v8</url>
diff --git a/src/main/java/com/eclipsesource/v8/V8.java b/src/main/java/com/eclipsesource/v8/V8.java
index 65d4500cf..08d0d4980 100644
--- a/src/main/java/com/eclipsesource/v8/V8.java
+++ b/src/main/java/com/eclipsesource/v8/V8.java
@@ -249,7 +249,9 @@ private void notifyReferenceDisposed(final V8Value object) {
private static void checkNativeLibraryLoaded() {
if (!nativeLibraryLoaded) {
- String message = "J2V8 native library not loaded (" + LibraryLoader.computeLibraryShortName(true) + ")";
+ String vendorName = LibraryLoader.computeLibraryShortName(true);
+ String baseName = LibraryLoader.computeLibraryShortName(false);
+ String message = "J2V8 native library not loaded (" + baseName + "/" + vendorName + ")";
if (nativeLoadError != null) {
throw new IllegalStateException(message, nativeLoadError);
@@ -1565,6 +1567,19 @@ protected void releaseMethodDescriptor(final long v8RuntimePtr, final long metho
private native static boolean _isRunning(final long v8RuntimePtr);
+ private native static boolean _isNodeCompatible();
+
+ public static boolean isNodeCompatible() {
+ if (!nativeLibraryLoaded) {
+ synchronized (lock) {
+ if (!nativeLibraryLoaded) {
+ load(null);
+ }
+ }
+ }
+ return _isNodeCompatible();
+ }
+
void addObjRef(final V8Value reference) {
objectReferences++;
if (!referenceHandlers.isEmpty()) {
diff --git a/src/test/java/com/eclipsesource/v8/NodeJSTest.java b/src/test/java/com/eclipsesource/v8/NodeJSTest.java
index 3d2aa95c7..7d934a969 100644
--- a/src/test/java/com/eclipsesource/v8/NodeJSTest.java
+++ b/src/test/java/com/eclipsesource/v8/NodeJSTest.java
@@ -13,6 +13,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
import java.io.File;
import java.io.IOException;
@@ -20,12 +21,20 @@
import org.junit.After;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.Test;
public class NodeJSTest {
private NodeJS nodeJS;
+ @BeforeClass
+ public static void beforeClass() {
+ // only run this test if the underlying native J2V8 library was compiled
+ // with the Node.js features included, otherwise just skip all the tests
+ assumeTrue(V8.isNodeCompatible());
+ }
+
@Before
public void setup() {
nodeJS = NodeJS.createNodeJS();