From 77767010d7c0b2e813e7975da3aeec851ac1ddd1 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 8 Sep 2024 13:24:03 +0330 Subject: [PATCH 01/82] Add base image for Java benchmarks on OpenWhisk --- config/systems.json | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/config/systems.json b/config/systems.json index 5a4077a2..9d1baaa7 100644 --- a/config/systems.json +++ b/config/systems.json @@ -316,6 +316,24 @@ "minio": "7.0.16" } } + }, + "java": { + "base_images": { + "8": "openwhisk/actionloop-java-v8", + }, + "images": [ + "function" + ], + "username": "docker_user", + "deployment": { + "files": [ + "index.js", + "storage.js" + ], + "packages": { + "minio": "8.5.9" + } + } } }, "architecture": ["x64"], From 13aa9ec6c4385449d01c22060654fa8313ee1b34 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 8 Sep 2024 17:55:11 +0330 Subject: [PATCH 02/82] Add Dockerfile for running Java benchmarks on OpenWhisk --- dockerfiles/openwhisk/java/Dockerfile.function | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 dockerfiles/openwhisk/java/Dockerfile.function diff --git a/dockerfiles/openwhisk/java/Dockerfile.function b/dockerfiles/openwhisk/java/Dockerfile.function new file mode 100644 index 00000000..d86cd461 --- /dev/null +++ b/dockerfiles/openwhisk/java/Dockerfile.function @@ -0,0 +1,8 @@ +ARG BASE_IMAGE +FROM $BASE_IMAGE +COPY . /function/ + +RUN apt-get update && apt-get install -y maven + +# Check if pom.xml exists before running Maven +RUN if [ -f ./pom.xml ]; then mvn clean install; else echo "pom.xml not found, aborting build." 
&& exit 1; fi From b02657713ecb51c403cb0f6ed2e4dc545100aecc Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 8 Sep 2024 19:30:20 +0330 Subject: [PATCH 03/82] Update base image of java on OpenWhisk --- config/systems.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/config/systems.json b/config/systems.json index 9d1baaa7..3bbb5acb 100644 --- a/config/systems.json +++ b/config/systems.json @@ -319,7 +319,7 @@ }, "java": { "base_images": { - "8": "openwhisk/actionloop-java-v8", + "8": "openwhisk/java8action" }, "images": [ "function" @@ -327,8 +327,8 @@ "username": "docker_user", "deployment": { "files": [ - "index.js", - "storage.js" + "Main.java", + "Storage.java" ], "packages": { "minio": "8.5.9" From d478dc1d01d249c4c51d9ad8a30277214e0f2d76 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 10 Sep 2024 21:02:36 +0330 Subject: [PATCH 04/82] Add Java-based handler for OpenWhisk --- benchmarks/wrappers/openwhisk/java/Main.java | 55 ++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 benchmarks/wrappers/openwhisk/java/Main.java diff --git a/benchmarks/wrappers/openwhisk/java/Main.java b/benchmarks/wrappers/openwhisk/java/Main.java new file mode 100644 index 00000000..d21960ae --- /dev/null +++ b/benchmarks/wrappers/openwhisk/java/Main.java @@ -0,0 +1,55 @@ +import com.google.gson.JsonObject; +import java.util.faas.Function; +import java.time.Instant; +import java.time.Duration; +import java.io.File; +import java.io.IOException; + + +public class Main { + public static JsonObject main(JsonObject args) { + + // Logger logger = Logger.getLogger(FunctionHandler.class.getName()); + // logger.setLevel(Level.INFO); + + Gson gson = new Gson(); + Function function = new Function(); + + Instant begin = Instant.now(); + JsonObject result = function.handler(args); + Instant end = Instant.now(); + + long computeTime = Duration.between(begin, end).toNanos() / 1000; // Convert nanoseconds to microseconds + + boolean isCold = 
false; + String fileName = "/cold_run"; + + File file = new File(fileName); + if (!file.exists()) { + isCold = true; + try { + file.createNewFile(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + // Convert to Unix timestamp in seconds.microseconds + String formattedBegin = String.format("%d.%06d", begin.getEpochSecond(), begin.getNano() / 1000); // Convert nanoseconds to microseconds + String formattedEnd = String.format("%d.%06d", end.getEpochSecond(), end.getNano() / 1000); + + String requestId = System.getenv("__OW_ACTIVATION_ID"); + + JsonObject jsonResult = new JsonObject(); + jsonObject.put("begin", formattedBegin); + jsonObject.put("end", formattedEnd); + jsonObject.put("request_id", "requestId"); + jsonObject.put("compute_time", computeTime); + jsonObject.put("is_cold", isCold); + jsonObject.put("result", result); + return jsonResult; + } +} + + + \ No newline at end of file From d36481cadca157d2b9ed2a73087251ac138d0f13 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Thu, 19 Sep 2024 03:35:44 +0330 Subject: [PATCH 05/82] Add example config file for running java benchmarks on OpenWhisk --- config/example2.json | 69 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 config/example2.json diff --git a/config/example2.json b/config/example2.json new file mode 100644 index 00000000..1cf21c81 --- /dev/null +++ b/config/example2.json @@ -0,0 +1,69 @@ +{ + "experiments": { + "deployment": "openwhisk", + "update_code": false, + "update_storage": false, + "download_results": false, + "runtime": { + "language": "java", + "version": "8" + }, + "type": "invocation-overhead", + "perf-cost": { + "benchmark": "110.dynamic-html", + "experiments": ["cold", "warm", "burst", "sequential"], + "input-size": "test", + "repetitions": 50, + "concurrent-invocations": 50, + "memory-sizes": [128, 256] + }, + "network-ping-pong": { + "invocations": 50, + "repetitions": 1000, + "threads": 1 + }, + "invocation-overhead": { + 
"repetitions": 5, + "N": 20, + "type": "payload", + "payload_begin": 1024, + "payload_end": 6251000, + "payload_points": 20, + "code_begin": 1048576, + "code_end": 261619712, + "code_points": 20 + }, + "eviction-model": { + "invocations": 1, + "function_copy_idx": 0, + "repetitions": 5, + "sleep": 1 + } + }, + "deployment": { + "openwhisk": { + "shutdownStorage": false, + "removeCluster": false, + "wskBypassSecurity": "true", + "wskExec": "wsk", + "experimentalManifest": false, + "docker_registry": { + "registry": "", + "username": "", + "password": "" + }, + "storage": { + "address": "", + "mapped_port": 9011, + "access_key": "", + "secret_key": "", + "instance_id": "", + "output_buckets": [], + "input_buckets": [], + "type": "minio" + } + + } + } + } + \ No newline at end of file From a7055e15f6c8a7fbb8f9ce9aff4864716d1b1e45 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Thu, 19 Sep 2024 03:41:50 +0330 Subject: [PATCH 06/82] Add JAVA enum to list of languages --- sebs/faas/function.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sebs/faas/function.py b/sebs/faas/function.py index 0fab7bcf..9ddad97f 100644 --- a/sebs/faas/function.py +++ b/sebs/faas/function.py @@ -263,6 +263,7 @@ def deserialize(cached_config: dict) -> "Trigger": class Language(Enum): PYTHON = "python" NODEJS = "nodejs" + JAVA = "java" # FIXME: 3.7+ python with future annotations @staticmethod @@ -299,7 +300,7 @@ def serialize(self) -> dict: @staticmethod def deserialize(config: dict) -> Runtime: - languages = {"python": Language.PYTHON, "nodejs": Language.NODEJS} + languages = {"python": Language.PYTHON, "nodejs": Language.NODEJS, "java": Language.JAVA} return Runtime(language=languages[config["language"]], version=config["version"]) From cdd1763c10fcc40f758ec6b8756a3113ece5cb61 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Thu, 19 Sep 2024 03:45:34 +0330 Subject: [PATCH 07/82] Add config of 601.hello-world (A simple java benchmark) --- 
benchmarks/600.java/601.hello-world/config.json | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 benchmarks/600.java/601.hello-world/config.json diff --git a/benchmarks/600.java/601.hello-world/config.json b/benchmarks/600.java/601.hello-world/config.json new file mode 100644 index 00000000..0c5d480e --- /dev/null +++ b/benchmarks/600.java/601.hello-world/config.json @@ -0,0 +1,6 @@ +{ + "timeout": 120, + "memory": 512, + "languages": ["java"] + } + \ No newline at end of file From e9b72b21c62bf010abd8d51ca24f498becb58d61 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Thu, 19 Sep 2024 04:12:42 +0330 Subject: [PATCH 08/82] Init maven structure of 601.hello-world and add some codes for running java benchmarks --- benchmarks/600.java/601.hello-world/java/pom.xml | 0 .../601.hello-world/java/src/java/Function.java | 0 benchmarks/wrappers/openwhisk/java/Storage.java | 0 sebs.py | 2 +- sebs/benchmark.py | 14 ++++++++++++++ 5 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 benchmarks/600.java/601.hello-world/java/pom.xml create mode 100644 benchmarks/600.java/601.hello-world/java/src/java/Function.java create mode 100644 benchmarks/wrappers/openwhisk/java/Storage.java diff --git a/benchmarks/600.java/601.hello-world/java/pom.xml b/benchmarks/600.java/601.hello-world/java/pom.xml new file mode 100644 index 00000000..e69de29b diff --git a/benchmarks/600.java/601.hello-world/java/src/java/Function.java b/benchmarks/600.java/601.hello-world/java/src/java/Function.java new file mode 100644 index 00000000..e69de29b diff --git a/benchmarks/wrappers/openwhisk/java/Storage.java b/benchmarks/wrappers/openwhisk/java/Storage.java new file mode 100644 index 00000000..e69de29b diff --git a/sebs.py b/sebs.py index 80fb11ed..9334c6f6 100755 --- a/sebs.py +++ b/sebs.py @@ -64,7 +64,7 @@ def simplified_common_params(func): @click.option( "--language", default=None, - type=click.Choice(["python", "nodejs"]), + type=click.Choice(["python", "nodejs", "java"]), 
help="Benchmark language", ) @click.option("--language-version", default=None, type=str, help="Benchmark language version") diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 42adb4e7..4205fb54 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -316,6 +316,8 @@ def copy_code(self, output_dir): FILES = { "python": ["*.py", "requirements.txt*"], "nodejs": ["*.js", "package.json"], + "java": ["pom.xml"], + } path = os.path.join(self.benchmark_path, self.language_name) for file_type in FILES[self.language_name]: @@ -358,6 +360,16 @@ def add_deployment_files(self, output_dir): for file in handlers: shutil.copy2(file, os.path.join(output_dir)) + def add_deployment_package_java(self, output_dir): + # append to the end of requirements file + packages = self._system_config.deployment_packages( + self._deployment_name, self.language_name + ) + if len(packages): + with open(os.path.join(output_dir, "requirements.txt"), "a") as out: + for package in packages: + out.write(package) + def add_deployment_package_python(self, output_dir): destination_file = f"requirements.txt.{self._language_version}" @@ -406,6 +418,8 @@ def add_deployment_package(self, output_dir): self.add_deployment_package_python(output_dir) elif self.language == Language.NODEJS: self.add_deployment_package_nodejs(output_dir) + elif self.language == Language.JAVA: + self.add_deployment_package_java(output_dir) else: raise NotImplementedError From 8d2e0157f95d4a345ccdda1e49b9fa1e838aac58 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Fri, 20 Sep 2024 00:46:17 +0330 Subject: [PATCH 09/82] Sync hello-world maven paroject with the wrapper of openwhisk --- .../600.java/601.hello-world/my-app/pom.xml | 18 +++++++++ .../main/java/com/example/project/App.java | 17 +++++++++ .../java/com/example/project/AppTest.java | 38 +++++++++++++++++++ benchmarks/wrappers/openwhisk/java/Main.java | 4 +- 4 files changed, 75 insertions(+), 2 deletions(-) create mode 100644 
benchmarks/600.java/601.hello-world/my-app/pom.xml create mode 100644 benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java create mode 100644 benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java diff --git a/benchmarks/600.java/601.hello-world/my-app/pom.xml b/benchmarks/600.java/601.hello-world/my-app/pom.xml new file mode 100644 index 00000000..f61b9e7f --- /dev/null +++ b/benchmarks/600.java/601.hello-world/my-app/pom.xml @@ -0,0 +1,18 @@ + + 4.0.0 + com.example.project + my-app + jar + 1.0-SNAPSHOT + my-app + http://maven.apache.org + + + junit + junit + 3.8.1 + test + + + diff --git a/benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java b/benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java new file mode 100644 index 00000000..f59864ee --- /dev/null +++ b/benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java @@ -0,0 +1,17 @@ +package com.example.project; +import com.google.gson.JsonObject; + +/** + * Hello world! + * + */ +public class App +{ + + + public JsonObject handler( String[] args ) + { + JsonObject jsonResult = new JsonObject(); + jsonObject.put("my string=", "heloooo worlddd!"); + } +} diff --git a/benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java b/benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java new file mode 100644 index 00000000..b3a7066d --- /dev/null +++ b/benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java @@ -0,0 +1,38 @@ +package com.example.project; + +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestSuite; + +/** + * Unit test for simple App. 
+ */ +public class AppTest + extends TestCase +{ + /** + * Create the test case + * + * @param testName name of the test case + */ + public AppTest( String testName ) + { + super( testName ); + } + + /** + * @return the suite of tests being tested + */ + public static Test suite() + { + return new TestSuite( AppTest.class ); + } + + /** + * Rigourous Test :-) + */ + public void testApp() + { + assertTrue( true ); + } +} diff --git a/benchmarks/wrappers/openwhisk/java/Main.java b/benchmarks/wrappers/openwhisk/java/Main.java index d21960ae..828aa64f 100644 --- a/benchmarks/wrappers/openwhisk/java/Main.java +++ b/benchmarks/wrappers/openwhisk/java/Main.java @@ -1,5 +1,5 @@ import com.google.gson.JsonObject; -import java.util.faas.Function; +import com.example.project.App ; import java.time.Instant; import java.time.Duration; import java.io.File; @@ -13,7 +13,7 @@ public static JsonObject main(JsonObject args) { // logger.setLevel(Level.INFO); Gson gson = new Gson(); - Function function = new Function(); + App function = new App(); Instant begin = Instant.now(); JsonObject result = function.handler(args); From 8dcddefe7b9820a29dd3d5c3f90b4602ae1635aa Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 24 Sep 2024 01:21:32 +0330 Subject: [PATCH 10/82] Example config file for running 601.hello-world on openwhisk --- config/example2.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/example2.json b/config/example2.json index 1cf21c81..3575d601 100644 --- a/config/example2.json +++ b/config/example2.json @@ -10,7 +10,7 @@ }, "type": "invocation-overhead", "perf-cost": { - "benchmark": "110.dynamic-html", + "benchmark": "601.hello-world", "experiments": ["cold", "warm", "burst", "sequential"], "input-size": "test", "repetitions": 50, From b6c8bb799f2ae6b91baae9fe0c3a44927b2c9647 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 24 Sep 2024 01:23:03 +0330 Subject: [PATCH 11/82] Correct Structure of maven project in 601.hello-world benchmark 
--- .../600.java/601.hello-world/java/pom.xml | 38 +++++++++++++++++++ .../java/src/java/Function.java | 0 .../java/src/main/java/faas/App.java | 7 ++++ .../600.java/601.hello-world/my-app/pom.xml | 18 --------- .../main/java/com/example/project/App.java | 17 --------- .../java/com/example/project/AppTest.java | 38 ------------------- 6 files changed, 45 insertions(+), 73 deletions(-) delete mode 100644 benchmarks/600.java/601.hello-world/java/src/java/Function.java create mode 100644 benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java delete mode 100644 benchmarks/600.java/601.hello-world/my-app/pom.xml delete mode 100644 benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java delete mode 100644 benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java diff --git a/benchmarks/600.java/601.hello-world/java/pom.xml b/benchmarks/600.java/601.hello-world/java/pom.xml index e69de29b..eb4f359e 100644 --- a/benchmarks/600.java/601.hello-world/java/pom.xml +++ b/benchmarks/600.java/601.hello-world/java/pom.xml @@ -0,0 +1,38 @@ + + + 4.0.0 + + + faas + 601.hello-world + 1.0-SNAPSHOT + jar + + + + 1.8 + 1.8 + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 3.2.0 + + + + faas.App + + + + + + + + diff --git a/benchmarks/600.java/601.hello-world/java/src/java/Function.java b/benchmarks/600.java/601.hello-world/java/src/java/Function.java deleted file mode 100644 index e69de29b..00000000 diff --git a/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java b/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java new file mode 100644 index 00000000..365a6201 --- /dev/null +++ b/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java @@ -0,0 +1,7 @@ +package faas; + +public class App { + public static void main(String[] args) { + System.out.println("Hellooooooooooooooooooo, World!"); + } +} \ No newline at end of file diff --git 
a/benchmarks/600.java/601.hello-world/my-app/pom.xml b/benchmarks/600.java/601.hello-world/my-app/pom.xml deleted file mode 100644 index f61b9e7f..00000000 --- a/benchmarks/600.java/601.hello-world/my-app/pom.xml +++ /dev/null @@ -1,18 +0,0 @@ - - 4.0.0 - com.example.project - my-app - jar - 1.0-SNAPSHOT - my-app - http://maven.apache.org - - - junit - junit - 3.8.1 - test - - - diff --git a/benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java b/benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java deleted file mode 100644 index f59864ee..00000000 --- a/benchmarks/600.java/601.hello-world/my-app/src/main/java/com/example/project/App.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.example.project; -import com.google.gson.JsonObject; - -/** - * Hello world! - * - */ -public class App -{ - - - public JsonObject handler( String[] args ) - { - JsonObject jsonResult = new JsonObject(); - jsonObject.put("my string=", "heloooo worlddd!"); - } -} diff --git a/benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java b/benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java deleted file mode 100644 index b3a7066d..00000000 --- a/benchmarks/600.java/601.hello-world/my-app/src/test/java/com/example/project/AppTest.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.example.project; - -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; - -/** - * Unit test for simple App. 
- */ -public class AppTest - extends TestCase -{ - /** - * Create the test case - * - * @param testName name of the test case - */ - public AppTest( String testName ) - { - super( testName ); - } - - /** - * @return the suite of tests being tested - */ - public static Test suite() - { - return new TestSuite( AppTest.class ); - } - - /** - * Rigourous Test :-) - */ - public void testApp() - { - assertTrue( true ); - } -} From 303297834b2927e3980b9d1f89ddf1a0a8806ca7 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 24 Sep 2024 01:23:55 +0330 Subject: [PATCH 12/82] Expand add_code functions for maven java rojects --- sebs/benchmark.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 4205fb54..6e50f01c 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -317,12 +317,23 @@ def copy_code(self, output_dir): "python": ["*.py", "requirements.txt*"], "nodejs": ["*.js", "package.json"], "java": ["pom.xml"], - } path = os.path.join(self.benchmark_path, self.language_name) + for file_type in FILES[self.language_name]: for f in glob.glob(os.path.join(path, file_type)): shutil.copy2(os.path.join(path, f), output_dir) + + # copy src folder of java (java benchmarks are maven project and need directories) + if self.language_name == "java": + output_src_dir = os.path.join(output_dir, "src") + + if os.path.exists(output_src_dir): + # If src dir in output exist, remove the directory and all its contents + shutil.rmtree(output_src_dir) + #To have contents of src directory in the direcory named src located in output + shutil.copytree(os.path.join(path, "src"), output_src_dir) + # support node.js benchmarks with language specific packages nodejs_package_json = os.path.join(path, f"package.json.{self.language_version}") if os.path.exists(nodejs_package_json): From 32235f50c119ed81c41f341eaf6a6e07c519e311 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 24 Sep 2024 01:26:57 +0330 Subject: [PATCH 
13/82] Exclude Java main wrapper from Docker directory created in runtimes. --- sebs/openwhisk/openwhisk.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 9c196fe2..68aa42d0 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -115,6 +115,7 @@ def package_code( CONFIG_FILES = { "python": ["__main__.py"], "nodejs": ["index.js"], + "nodejs": ["Main.java"], } package_config = CONFIG_FILES[language_name] From 7f8eb2f8d7cefe455e28e472030cbc4fafde3ca4 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Wed, 25 Sep 2024 13:31:49 +0330 Subject: [PATCH 14/82] Fix a big --- sebs/openwhisk/openwhisk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 68aa42d0..a8e23925 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -115,7 +115,7 @@ def package_code( CONFIG_FILES = { "python": ["__main__.py"], "nodejs": ["index.js"], - "nodejs": ["Main.java"], + "java": ["Main.java"], } package_config = CONFIG_FILES[language_name] From 67bb80cc07142dd90e69b6d78d6cea2da6b2342e Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 16 Mar 2025 01:00:20 +0330 Subject: [PATCH 15/82] Add required changes from PR222 to enable benchmarking of java codes. 
--- .gitignore | 3 ++ benchmarks/wrappers/openwhisk/java/Main.java | 29 +++++++++------ .../openwhisk/java/Dockerfile.function | 6 ++-- sebs/benchmark.py | 27 +++++++++++++- sebs/openwhisk/openwhisk.py | 36 ++++++++++++++----- 5 files changed, 78 insertions(+), 23 deletions(-) diff --git a/.gitignore b/.gitignore index 0712f6d7..d158df83 100644 --- a/.gitignore +++ b/.gitignore @@ -188,3 +188,6 @@ cache # IntelliJ IDEA files .idea *.iml + +# Visual Studio Code files +.vscode/ \ No newline at end of file diff --git a/benchmarks/wrappers/openwhisk/java/Main.java b/benchmarks/wrappers/openwhisk/java/Main.java index 828aa64f..e10d9e11 100644 --- a/benchmarks/wrappers/openwhisk/java/Main.java +++ b/benchmarks/wrappers/openwhisk/java/Main.java @@ -1,9 +1,13 @@ +import faas.App; +import com.google.gson.Gson; import com.google.gson.JsonObject; -import com.example.project.App ; +import util.SessionBlob; +import util.ShaSecurityProvider; import java.time.Instant; import java.time.Duration; import java.io.File; import java.io.IOException; +//import jakarta.ws.rs.core.Response; public class Main { @@ -15,12 +19,17 @@ public static JsonObject main(JsonObject args) { Gson gson = new Gson(); App function = new App(); + long start_nano = System.nanoTime(); + Instant begin = Instant.now(); JsonObject result = function.handler(args); Instant end = Instant.now(); - long computeTime = Duration.between(begin, end).toNanos() / 1000; // Convert nanoseconds to microseconds + long end_nano = System.nanoTime(); + + // long computeTime = Duration.between(begin, end).toNanos() / 1000; // Convert nanoseconds to microseconds + long computeTime = end_nano - start_nano; boolean isCold = false; String fileName = "/cold_run"; @@ -41,15 +50,13 @@ public static JsonObject main(JsonObject args) { String requestId = System.getenv("__OW_ACTIVATION_ID"); JsonObject jsonResult = new JsonObject(); - jsonObject.put("begin", formattedBegin); - jsonObject.put("end", formattedEnd); - 
jsonObject.put("request_id", "requestId"); - jsonObject.put("compute_time", computeTime); - jsonObject.put("is_cold", isCold); - jsonObject.put("result", result); + jsonResult.addProperty("begin", formattedBegin); + jsonResult.addProperty("end", formattedEnd); + jsonResult.addProperty("request_id", requestId); + jsonResult.addProperty("compute_time", computeTime); + jsonResult.addProperty("is_cold", isCold); + jsonResult.addProperty("result", result.toString()); return jsonResult; } -} - - \ No newline at end of file +} diff --git a/dockerfiles/openwhisk/java/Dockerfile.function b/dockerfiles/openwhisk/java/Dockerfile.function index d86cd461..b72ceb15 100644 --- a/dockerfiles/openwhisk/java/Dockerfile.function +++ b/dockerfiles/openwhisk/java/Dockerfile.function @@ -2,7 +2,7 @@ ARG BASE_IMAGE FROM $BASE_IMAGE COPY . /function/ -RUN apt-get update && apt-get install -y maven +# RUN apt-get update && apt-get install -y maven -# Check if pom.xml exists before running Maven -RUN if [ -f ./pom.xml ]; then mvn clean install; else echo "pom.xml not found, aborting build." && exit 1; fi +# # Check if pom.xml exists before running Maven +# RUN if [ -f ./pom.xml ]; then mvn clean install; else echo "pom.xml not found, aborting build." 
&& exit 1; fi diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 6e50f01c..edb1cf32 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -1,6 +1,7 @@ import glob import hashlib import json +import subprocess import os import shutil import subprocess @@ -252,8 +253,9 @@ def hash_directory(directory: str, deployment: str, language: str): FILES = { "python": ["*.py", "requirements.txt*"], "nodejs": ["*.js", "package.json"], + "java": ["*.java", "pom.xml"], } - WRAPPERS = {"python": "*.py", "nodejs": "*.js"} + WRAPPERS = {"python": "*.py", "nodejs": "*.js", "java": "*.java"} NON_LANG_FILES = ["*.sh", "*.json"] selected_files = FILES[language] + NON_LANG_FILES for file_type in selected_files: @@ -339,6 +341,28 @@ def copy_code(self, output_dir): if os.path.exists(nodejs_package_json): shutil.copy2(nodejs_package_json, os.path.join(output_dir, "package.json")) + #This is for making jar file and add it to docker directory + def add_java_output(self, code_dir): + + if self.language_name == "java": + + # Step 1: Move Main.java o src directory + src_dir = os.path.join(code_dir, "src", "main", "java") + if os.path.exists(code_dir): + main_java_path = os.path.join(code_dir, "Main.java") + if os.path.exists(main_java_path): + shutil.move(main_java_path, src_dir) + + # Step 2: Run mvn clean install + try: + # Navigate to the code directory where the pom.xml file is located + subprocess.run(['mvn', 'clean', 'install'], cwd=code_dir, check=True, text=True, capture_output=True) + print("Maven build successful!") + except subprocess.CalledProcessError as e: + print(f"Error during Maven build:\n{e.stdout}\n{e.stderr}") + return + + def add_benchmark_data(self, output_dir): cmd = "/bin/bash {benchmark_path}/init.sh {output_dir} false {architecture}" paths = [ @@ -617,6 +641,7 @@ def build( self.copy_code(self._output_dir) self.add_benchmark_data(self._output_dir) self.add_deployment_files(self._output_dir) + self.add_java_output(self._output_dir) 
self.add_deployment_package(self._output_dir) self.install_dependencies(self._output_dir) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index a8e23925..0dc61b75 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -110,14 +110,14 @@ def package_code( directory, language_name, language_version, architecture, benchmark, is_cached ) - # We deploy Minio config in code package since this depends on local - # deployment - it cannnot be a part of Docker image - CONFIG_FILES = { - "python": ["__main__.py"], - "nodejs": ["index.js"], - "java": ["Main.java"], - } - package_config = CONFIG_FILES[language_name] + if language_name != 'java': + # We deploy Minio config in code package since this depends on local + # deployment - it cannnot be a part of Docker image + CONFIG_FILES = { + "python": ["__main__.py"], + "nodejs": ["index.js"], + } + package_config = CONFIG_FILES[language_name] benchmark_archive = os.path.join(directory, f"{benchmark}.zip") subprocess.run( @@ -208,6 +208,25 @@ def create_function( code_package.language_version, code_package.architecture, ) + run_arguments = [ + *self.get_wsk_cmd(), + "action", + "create", + func_name, + "--web", + "true", + "--docker", + docker_image, + "--memory", + str(code_package.benchmark_config.memory), + "--timeout", + str(code_package.benchmark_config.timeout * 1000), + *self.storage_arguments(), + code_package.code_location, + ] + if code_package.language_name == 'java': + run_arguments.extend(["--main", "Main"]) + subprocess.run( [ *self.get_wsk_cmd(), @@ -229,6 +248,7 @@ def create_function( stdout=subprocess.PIPE, check=True, ) + function_cfg.docker_image = docker_image res = OpenWhiskFunction( func_name, code_package.benchmark, code_package.hash, function_cfg From 19c572d9ced40d623a776d467c37e24cbe582904 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 18 Mar 2025 14:23:54 +0330 Subject: [PATCH 16/82] Fix bug: Java simple benchmark (601.hello_world) now works correctly 
--- .../600.java/601.hello-world/config.json | 8 +-- benchmarks/600.java/601.hello-world/input.py | 5 ++ .../600.java/601.hello-world/java/pom.xml | 53 +++++++++++-------- .../java/src/main/java/faas/App.java | 8 ++- benchmarks/wrappers/openwhisk/java/Main.java | 5 +- 5 files changed, 47 insertions(+), 32 deletions(-) create mode 100644 benchmarks/600.java/601.hello-world/input.py diff --git a/benchmarks/600.java/601.hello-world/config.json b/benchmarks/600.java/601.hello-world/config.json index 0c5d480e..e3d6f85f 100644 --- a/benchmarks/600.java/601.hello-world/config.json +++ b/benchmarks/600.java/601.hello-world/config.json @@ -1,6 +1,6 @@ { - "timeout": 120, - "memory": 512, + "timeout": 60, + "memory": 256, "languages": ["java"] - } - \ No newline at end of file +} + diff --git a/benchmarks/600.java/601.hello-world/input.py b/benchmarks/600.java/601.hello-world/input.py new file mode 100644 index 00000000..136f8bc5 --- /dev/null +++ b/benchmarks/600.java/601.hello-world/input.py @@ -0,0 +1,5 @@ +def buckets_count(): + return (0, 0) + +def generate_input(data_dir, size, benchmarks_bucket, input_paths, output_paths, upload_func): + return { } \ No newline at end of file diff --git a/benchmarks/600.java/601.hello-world/java/pom.xml b/benchmarks/600.java/601.hello-world/java/pom.xml index eb4f359e..f5e1e781 100644 --- a/benchmarks/600.java/601.hello-world/java/pom.xml +++ b/benchmarks/600.java/601.hello-world/java/pom.xml @@ -1,38 +1,45 @@ - - + + 4.0.0 - faas - 601.hello-world - 1.0-SNAPSHOT - jar + benchmark + 1 - - 1.8 - 1.8 + 8 + 8 + UTF-8 - + + + + com.google.code.gson + gson + 2.11.0 + + org.apache.maven.plugins - maven-jar-plugin - 3.2.0 - - - - faas.App - - - + maven-shade-plugin + 3.2.4 + + + package + + shade + + + false + + + - diff --git a/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java b/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java index 365a6201..367cc204 100644 --- 
a/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java +++ b/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java @@ -1,7 +1,11 @@ package faas; +import com.google.gson.JsonObject; public class App { - public static void main(String[] args) { - System.out.println("Hellooooooooooooooooooo, World!"); + public JsonObject handler(JsonObject args) { + + JsonObject jsonResult = new JsonObject(); + jsonResult.addProperty("Hello", "World"); + return jsonResult; } } \ No newline at end of file diff --git a/benchmarks/wrappers/openwhisk/java/Main.java b/benchmarks/wrappers/openwhisk/java/Main.java index e10d9e11..138b7dfe 100644 --- a/benchmarks/wrappers/openwhisk/java/Main.java +++ b/benchmarks/wrappers/openwhisk/java/Main.java @@ -1,13 +1,11 @@ import faas.App; import com.google.gson.Gson; import com.google.gson.JsonObject; -import util.SessionBlob; -import util.ShaSecurityProvider; import java.time.Instant; import java.time.Duration; import java.io.File; import java.io.IOException; -//import jakarta.ws.rs.core.Response; + public class Main { @@ -60,3 +58,4 @@ public static JsonObject main(JsonObject args) { } } + From ab2bc2dcce7dd6d3e8fc86b8b70f285c8a54befb Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 18 Mar 2025 14:54:57 +0330 Subject: [PATCH 17/82] Use language enum instead of hardcoded 'java' --- sebs/benchmark.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index edb1cf32..839feed2 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -315,6 +315,8 @@ def query_cache(self): self._is_cached_valid = False def copy_code(self, output_dir): + from sebs.faas.function import Language + FILES = { "python": ["*.py", "requirements.txt*"], "nodejs": ["*.js", "package.json"], @@ -327,7 +329,7 @@ def copy_code(self, output_dir): shutil.copy2(os.path.join(path, f), output_dir) # copy src folder of java (java benchmarks are maven project and need directories) - if 
self.language_name == "java": + if self.language == Language.JAVA: output_src_dir = os.path.join(output_dir, "src") if os.path.exists(output_src_dir): @@ -343,8 +345,8 @@ def copy_code(self, output_dir): #This is for making jar file and add it to docker directory def add_java_output(self, code_dir): - - if self.language_name == "java": + from sebs.faas.function import Language + if self.language == Language.JAVA: # Step 1: Move Main.java o src directory src_dir = os.path.join(code_dir, "src", "main", "java") From 37c37aee43d9e1be8c93ecfe7b3d69defc32c282 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Tue, 18 Mar 2025 16:33:14 +0330 Subject: [PATCH 18/82] Remove unused parts from the Java benchmarks wrapper for OpenWhisk. --- benchmarks/wrappers/openwhisk/java/Main.java | 6 ------ 1 file changed, 6 deletions(-) diff --git a/benchmarks/wrappers/openwhisk/java/Main.java b/benchmarks/wrappers/openwhisk/java/Main.java index 138b7dfe..161dc7bd 100644 --- a/benchmarks/wrappers/openwhisk/java/Main.java +++ b/benchmarks/wrappers/openwhisk/java/Main.java @@ -1,5 +1,4 @@ import faas.App; -import com.google.gson.Gson; import com.google.gson.JsonObject; import java.time.Instant; import java.time.Duration; @@ -7,14 +6,9 @@ import java.io.IOException; - public class Main { public static JsonObject main(JsonObject args) { - - // Logger logger = Logger.getLogger(FunctionHandler.class.getName()); - // logger.setLevel(Level.INFO); - Gson gson = new Gson(); App function = new App(); long start_nano = System.nanoTime(); From 601903f2dd396aaee8c91388aa21c2a00e672aed Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Wed, 19 Mar 2025 15:36:53 +0330 Subject: [PATCH 19/82] Change the directory where the file is created in container for detecting cold starts --- benchmarks/wrappers/openwhisk/java/Main.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/wrappers/openwhisk/java/Main.java b/benchmarks/wrappers/openwhisk/java/Main.java index 161dc7bd..59a33ee3 
100644 --- a/benchmarks/wrappers/openwhisk/java/Main.java +++ b/benchmarks/wrappers/openwhisk/java/Main.java @@ -23,7 +23,7 @@ public static JsonObject main(JsonObject args) { long computeTime = end_nano - start_nano; boolean isCold = false; - String fileName = "/cold_run"; + String fileName = "/tmp/cold_run"; File file = new File(fileName); if (!file.exists()) { From 2c59db1a79eac7f6dc9bb0d99e2f5371826a8abb Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 4 May 2025 13:55:24 +0330 Subject: [PATCH 20/82] Add Java wrapper for AWS Lambda benchmarks --- benchmarks/wrappers/aws/java/Handler.java | 75 +++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 benchmarks/wrappers/aws/java/Handler.java diff --git a/benchmarks/wrappers/aws/java/Handler.java b/benchmarks/wrappers/aws/java/Handler.java new file mode 100644 index 00000000..b51de44c --- /dev/null +++ b/benchmarks/wrappers/aws/java/Handler.java @@ -0,0 +1,75 @@ +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.core.type.TypeReference; + +import faas.App; + +import java.io.File; +import java.io.IOException; +import java.time.Instant; +import java.util.HashMap; +import java.util.Map; + +public class Handler implements RequestHandler, String> { + private static final ObjectMapper mapper = new ObjectMapper(); + + @Override + public String handleRequest(Map event, Context context) { + + Map inputData = event; + + // Extract input if trigger is API Gateway (body is a string) + if (event.containsKey("body") && event.get("body") instanceof String) + try { + inputData = mapper.readValue((String) event.get("body"),new TypeReference>() {}); + } catch (IOException e) { + throw new RuntimeException("Failed to parse JSON body", e); + } + + App function = new App(); + + Instant begin = Instant.now(); + long start_nano = System.nanoTime(); + + Map 
functionOutput = function.handler(inputData); + + long end_nano = System.nanoTime(); + Instant end = Instant.now(); + + + long computeTime = end_nano - start_nano; + // Detect cold start + boolean isCold = false; + String fileName = "/tmp/cold_run"; + + File file = new File(fileName); + if (!file.exists()) { + isCold = true; + try { + file.createNewFile(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + // Convert to Unix timestamp in seconds.microseconds + String formattedBegin = String.format("%d.%06d", begin.getEpochSecond(), begin.getNano() / 1000); // Convert nanoseconds to microseconds + String formattedEnd = String.format("%d.%06d", end.getEpochSecond(), end.getNano() / 1000); + + + Map result = new HashMap<>(); + result.put("begin", formattedBegin); + result.put("end", formattedEnd); + result.put("request_id", context.getAwsRequestId()); + result.put("compute_time", computeTime); + result.put("is_cold", isCold); + result.put("result", functionOutput); + try { + return mapper.writeValueAsString(result); + } catch (IOException e) { + throw new RuntimeException("Failed to serialize result of benchmark to JSON in Wrapper", e); + } + + } +} From 726e07a6c7fd73f04bcd8cdc14eff8ab40410b7f Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 11 May 2025 01:05:22 +0330 Subject: [PATCH 21/82] Add the nosql_func argument to the signature of generate_input --- benchmarks/600.java/601.hello-world/input.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/benchmarks/600.java/601.hello-world/input.py b/benchmarks/600.java/601.hello-world/input.py index 136f8bc5..52536abf 100644 --- a/benchmarks/600.java/601.hello-world/input.py +++ b/benchmarks/600.java/601.hello-world/input.py @@ -1,5 +1,13 @@ def buckets_count(): return (0, 0) -def generate_input(data_dir, size, benchmarks_bucket, input_paths, output_paths, upload_func): +def generate_input( + data_dir, + size, + benchmarks_bucket, + input_paths, + output_paths, + upload_func, + 
nosql_func=None +): return { } \ No newline at end of file From 4f88a8dfabc7efd6fa55e4b20146d8c767541d31 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 11 May 2025 01:16:43 +0330 Subject: [PATCH 22/82] Update hello_world benchmark input/output) for platform-independence Removed gson package, which was only suitable for OpenWhisk. --- .../java/src/main/java/faas/App.java | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java b/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java index 367cc204..fe0b2096 100644 --- a/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java +++ b/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java @@ -1,11 +1,13 @@ package faas; -import com.google.gson.JsonObject; +import java.util.HashMap; +import java.util.Map; public class App { - public JsonObject handler(JsonObject args) { + public Map handler(Map input) { - JsonObject jsonResult = new JsonObject(); - jsonResult.addProperty("Hello", "World"); - return jsonResult; + Map result = new HashMap<>(); + result.put("Hello", "World"); + return result; } -} \ No newline at end of file +} + From 139cf2992f983f77df9911f63824605a1feccf8a Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 11 May 2025 01:21:23 +0330 Subject: [PATCH 23/82] Update system.json for Java on AWS --- config/systems.json | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/config/systems.json b/config/systems.json index 3bbb5acb..b00fe237 100644 --- a/config/systems.json +++ b/config/systems.json @@ -121,10 +121,32 @@ "uuid": "3.4.0" } } - } + }, + "java": { + "base_images": { + "x64": { + "11": "amazon/aws-lambda-java:11" + }, + "arm64": { + "11": "amazon/aws-lambda-java:11" + } + }, + "images": [ + "build" + ], + "deployment": { + "files": [ + "Handler.java" + ], + "packages": { + "com.amazonaws:aws-lambda-java-core": "1.2.3", + 
"com.fasterxml.jackson.core:jackson-databind": "2.15.2" + } + } + } }, "architecture": ["x64", "arm64"], - "deployments": ["package", "container"] + "deployments": ["package"] }, "azure": { "languages": { From b1a0a0f95eddab5d72f47d37c5a1885a34be1edb Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sun, 11 May 2025 01:24:27 +0330 Subject: [PATCH 24/82] Platform-related dependencies for Java benchmarks are now added dynamically at runtime to pom.xml --- .../600.java/601.hello-world/java/pom.xml | 90 +++++++++++-------- sebs/benchmark.py | 47 +++++++--- 2 files changed, 88 insertions(+), 49 deletions(-) diff --git a/benchmarks/600.java/601.hello-world/java/pom.xml b/benchmarks/600.java/601.hello-world/java/pom.xml index f5e1e781..d504d9bc 100644 --- a/benchmarks/600.java/601.hello-world/java/pom.xml +++ b/benchmarks/600.java/601.hello-world/java/pom.xml @@ -1,45 +1,59 @@ - - 4.0.0 + 4.0.0 - faas - benchmark - 1 + faas + benchmark + 1.0 - - 8 - 8 - UTF-8 - + + UTF-8 + ${env.JAVA_VERSION} + - - - - com.google.code.gson - gson - 2.11.0 - - - - - - org.apache.maven.plugins - maven-shade-plugin - 3.2.4 - - - package - - shade - - - false - - - - - - + + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + ${java.version} + ${java.version} + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.2.4 + + + package + shade + + + + *:* + + module-info.class + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + + + diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 839feed2..5c264fd9 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -397,15 +397,6 @@ def add_deployment_files(self, output_dir): for file in handlers: shutil.copy2(file, os.path.join(output_dir)) - def add_deployment_package_java(self, output_dir): - # append to the end of requirements file - packages = self._system_config.deployment_packages( - self._deployment_name, self.language_name - ) - if len(packages): - with open(os.path.join(output_dir, 
"requirements.txt"), "a") as out: - for package in packages: - out.write(package) def add_deployment_package_python(self, output_dir): @@ -448,6 +439,40 @@ def add_deployment_package_nodejs(self, output_dir): with open(package_config, "w") as package_file: json.dump(package_json, package_file, indent=2) + # Dependencies in system.json are in "group:artifact": version format; + # this function converts them to proper Maven blocks. + def format_maven_dependency(self, group_artifact: str, version: str) -> str: + group_id, artifact_id = group_artifact.split(":") + return f""" + + {group_id} + {artifact_id} + {version} + """ + + def add_deployment_package_java(self, output_dir): + + pom_path = os.path.join(output_dir, "pom.xml") + with open(pom_path, "r") as f: + pom_content = f.read() + + packages = self._system_config.deployment_packages(self._deployment_name, self.language_name) + + dependency_blocks = "" + if len(packages): + for key, val in packages.items(): + dependency_name = key.strip('"').strip("'") + dependency_version = val.strip('"').strip("'") + dependency_blocks += self.format_maven_dependency(dependency_name, dependency_version) + "\n" + + if "" not in pom_content: + raise ValueError("pom.xml template is missing placeholder") + + pom_content = pom_content.replace("", dependency_blocks.strip()) + + with open(pom_path, "w") as f: + f.write(pom_content) + def add_deployment_package(self, output_dir): from sebs.faas.function import Language @@ -514,7 +539,7 @@ def install_dependencies(self, output_dir): } # run Docker container to install packages - PACKAGE_FILES = {"python": "requirements.txt", "nodejs": "package.json"} + PACKAGE_FILES = {"python": "requirements.txt", "nodejs": "package.json", "java" : "pom.xml"} file = os.path.join(output_dir, PACKAGE_FILES[self.language_name]) if os.path.exists(file): try: @@ -643,7 +668,7 @@ def build( self.copy_code(self._output_dir) self.add_benchmark_data(self._output_dir) self.add_deployment_files(self._output_dir) - 
self.add_java_output(self._output_dir) +# self.add_java_output(self._output_dir) self.add_deployment_package(self._output_dir) self.install_dependencies(self._output_dir) From 5625faf5098593ffa0ddc49c7b82420a86d1072c Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Thu, 15 May 2025 00:12:15 +0330 Subject: [PATCH 25/82] Add dockefile and installer for java on AWS --- dockerfiles/aws/java/Dockerfile.build | 22 ++++++++++++++++++++++ dockerfiles/java_installer.sh | 8 ++++++++ 2 files changed, 30 insertions(+) create mode 100644 dockerfiles/aws/java/Dockerfile.build create mode 100644 dockerfiles/java_installer.sh diff --git a/dockerfiles/aws/java/Dockerfile.build b/dockerfiles/aws/java/Dockerfile.build new file mode 100644 index 00000000..2b2a09e0 --- /dev/null +++ b/dockerfiles/aws/java/Dockerfile.build @@ -0,0 +1,22 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV JAVA_VERSION=${VERSION} + + +# useradd, groupmod + maven +RUN yum install -y shadow-utils maven +ENV GOSU_VERSION 1.14 +# https://github.com/tianon/gosu/releases/tag/1.14 +# key https://keys.openpgp.org/search?q=tianon%40debian.org +RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/${GOSU_VERSION}/gosu-amd64" \ + && chmod +x /usr/local/bin/gosu +RUN mkdir -p /sebs/ +COPY dockerfiles/java_installer.sh /sebs/installer.sh +COPY dockerfiles/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV PATH=/usr/sbin:$PATH +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] \ No newline at end of file diff --git a/dockerfiles/java_installer.sh b/dockerfiles/java_installer.sh new file mode 100644 index 00000000..1cc221f3 --- /dev/null +++ b/dockerfiles/java_installer.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +cd /mnt/function + +mvn clean install + + + From f784af3f748aa2df0febdca315e95b3d5cee83eb Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Thu, 15 May 2025 00:19:21 +0330 
Subject: [PATCH 26/82] Adopt the output dir of Java wrappers for compatibility with Maven's structure --- sebs/benchmark.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 5c264fd9..ac429c1d 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -394,8 +394,17 @@ def add_deployment_files(self, output_dir): self._deployment_name, self.language_name ) ] + + final_path = output_dir + + # For Java, use Maven structure: put handler files in src/main/java/ + if self.language_name == 'java': + final_path = os.path.join(output_dir, 'src', 'main', 'java') + os.makedirs(final_path, exist_ok=True) # make sure the path exists + for file in handlers: - shutil.copy2(file, os.path.join(output_dir)) + shutil.copy2(file, final_path) + def add_deployment_package_python(self, output_dir): From e7fad85fd806b3b1b47a405846cebe402f5f2457 Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Thu, 15 May 2025 02:55:49 +0330 Subject: [PATCH 27/82] Update hashing to adapt to Java benchmark directory structure --- sebs/benchmark.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index ac429c1d..48797228 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -253,16 +253,18 @@ def hash_directory(directory: str, deployment: str, language: str): FILES = { "python": ["*.py", "requirements.txt*"], "nodejs": ["*.js", "package.json"], - "java": ["*.java", "pom.xml"], + # Use recursive Java scan since *.java files are located in subfolders. 
+ "java": ["**/*.java", "pom.xml"], } WRAPPERS = {"python": "*.py", "nodejs": "*.js", "java": "*.java"} NON_LANG_FILES = ["*.sh", "*.json"] selected_files = FILES[language] + NON_LANG_FILES for file_type in selected_files: - for f in glob.glob(os.path.join(directory, file_type)): - path = os.path.join(directory, f) - with open(path, "rb") as opened_file: - hash_sum.update(opened_file.read()) + for f in glob.glob(os.path.join(directory, file_type), recursive=True): + if os.path.isfile(f): + path = os.path.join(directory, f) + with open(path, "rb") as opened_file: + hash_sum.update(opened_file.read()) # wrappers wrappers = project_absolute_path( "benchmarks", "wrappers", deployment, language, WRAPPERS[language] From f1796c1ca4cc43e2b45d9b9c124aeb614b0d3c2d Mon Sep 17 00:00:00 2001 From: mahlashrifi Date: Sat, 17 May 2025 08:21:42 +0330 Subject: [PATCH 28/82] Java HelloWorld benchmark now runs on AWS (requires enhancement) --- sebs/aws/aws.py | 72 ++++++++++++++++++++++++++++++------------------- 1 file changed, 44 insertions(+), 28 deletions(-) diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index 243a6f0f..abea416e 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -134,33 +134,46 @@ def package_code( directory, language_name, language_version, architecture, benchmark, is_cached ) - CONFIG_FILES = { - "python": ["handler.py", "requirements.txt", ".python_packages"], - "nodejs": ["handler.js", "package.json", "node_modules"], - } - package_config = CONFIG_FILES[language_name] - function_dir = os.path.join(directory, "function") - os.makedirs(function_dir) - # move all files to 'function' except handler.py - for file in os.listdir(directory): - if file not in package_config: - file = os.path.join(directory, file) - shutil.move(file, function_dir) - # FIXME: use zipfile - # create zip with hidden directory but without parent directory - execute("zip -qu -r9 {}.zip * .".format(benchmark), shell=True, cwd=directory) - benchmark_archive = 
"{}.zip".format(os.path.join(directory, benchmark)) - self.logging.info("Created {} archive".format(benchmark_archive)) - - bytes_size = os.path.getsize(os.path.join(directory, benchmark_archive)) - mbytes = bytes_size / 1024.0 / 1024.0 - self.logging.info("Zip archive size {:2f} MB".format(mbytes)) - - return ( - os.path.join(directory, "{}.zip".format(benchmark)), - bytes_size, - container_uri, - ) + if (language_name == 'java'): + + jar_path = os.path.join(directory, "target", "benchmark-1.0.jar") + bytes_size = os.path.getsize(jar_path) + + return ( + jar_path, + bytes_size, + container_uri, + ) + + else: + # so no need to add anything here + CONFIG_FILES = { + "python": ["handler.py", "requirements.txt", ".python_packages"], + "nodejs": ["handler.js", "package.json", "node_modules"], + } + package_config = CONFIG_FILES[language_name] + function_dir = os.path.join(directory, "function") + os.makedirs(function_dir) + # move all files to 'function' except handler.py + for file in os.listdir(directory): + if file not in package_config: + file = os.path.join(directory, file) + shutil.move(file, function_dir) + # FIXME: use zipfile + # create zip with hidden directory but without parent directory + execute("zip -qu -r9 {}.zip * .".format(benchmark), shell=True, cwd=directory) + benchmark_archive = "{}.zip".format(os.path.join(directory, benchmark)) + self.logging.info("Created {} archive".format(benchmark_archive)) + + bytes_size = os.path.getsize(os.path.join(directory, benchmark_archive)) + mbytes = bytes_size / 1024.0 / 1024.0 + self.logging.info("Zip archive size {:2f} MB".format(mbytes)) + + return ( + os.path.join(directory, "{}.zip".format(benchmark)), + bytes_size, + container_uri, + ) def _map_architecture(self, architecture: str) -> str: @@ -254,7 +267,10 @@ def create_function( create_function_params["Runtime"] = "{}{}".format( language, self._map_language_runtime(language, language_runtime) ) - create_function_params["Handler"] = "handler.handler" + if 
language == "java": + create_function_params["Handler"] = "Handler::handleRequest" + else: + create_function_params["Handler"] = "handler.handler" create_function_params = { k: v for k, v in create_function_params.items() if v is not None From a66fda75d5bff8b87ef58555f1c65bfd9518ae42 Mon Sep 17 00:00:00 2001 From: Alexander Schlieper Date: Thu, 11 Dec 2025 13:52:06 +0100 Subject: [PATCH 29/82] Add Java runtime support for AWS Lambda and Azure Functions Implements complete Java runtime infrastructure enabling serverless function benchmarking on AWS Lambda and Azure Functions platforms. Key components: - AWS/Azure function wrappers with cold start tracking and JSON handling - Maven-based build system with shaded JAR packaging - Docker build images for both platforms (Java 17) - Package structure: lib/ for JARs, handler/ for function metadata Benchmarks implemented: - 010.sleep: Basic microbenchmark for testing infrastructure - 110.dynamic-html: Web app with Mustache templating Critical fixes: - Removed Maven quiet mode (-q) causing Docker build failures - Proper dependency scoping (Azure Functions and Jackson must be compiled, not provided) - Platform-specific packaging logic (JAR structure differs from Python/Node.js) Tested and verified working on Azure Functions with Java 17. 
--- .../000.microbenchmarks/010.sleep/config.json | 2 +- .../java/src/main/java/function/Function.java | 33 ++++++ .../100.webapps/110.dynamic-html/config.json | 2 +- .../100.webapps/110.dynamic-html/java/init.sh | 6 ++ .../100.webapps/110.dynamic-html/java/pom.xml | 73 +++++++++++++ .../java/src/main/java/function/Function.java | 100 ++++++++++++++++++ .../java/templates/template.html | 26 +++++ benchmarks/wrappers/aws/java/pom.xml | 55 ++++++++++ .../org/serverlessbench/ColdStartTracker.java | 35 ++++++ .../org/serverlessbench/FunctionInvoker.java | 41 +++++++ .../java/org/serverlessbench/Handler.java | 49 +++++++++ benchmarks/wrappers/azure/java/pom.xml | 71 +++++++++++++ .../org/serverlessbench/ColdStartTracker.java | 33 ++++++ .../org/serverlessbench/FunctionInvoker.java | 41 +++++++ .../java/org/serverlessbench/Handler.java | 79 ++++++++++++++ config/systems.json | 32 ++++-- dockerfiles/aws/java/Dockerfile.build | 10 +- dockerfiles/aws/java/Dockerfile.function | 16 +++ dockerfiles/azure/java/Dockerfile.build | 18 ++++ dockerfiles/java_installer.sh | 15 ++- sebs/aws/aws.py | 50 ++++----- sebs/azure/azure.py | 94 +++++++++++++--- sebs/benchmark.py | 66 ++++++------ tools/build_docker_images.py | 19 +++- 24 files changed, 876 insertions(+), 90 deletions(-) create mode 100644 benchmarks/000.microbenchmarks/010.sleep/java/src/main/java/function/Function.java create mode 100755 benchmarks/100.webapps/110.dynamic-html/java/init.sh create mode 100644 benchmarks/100.webapps/110.dynamic-html/java/pom.xml create mode 100644 benchmarks/100.webapps/110.dynamic-html/java/src/main/java/function/Function.java create mode 100644 benchmarks/100.webapps/110.dynamic-html/java/templates/template.html create mode 100644 benchmarks/wrappers/aws/java/pom.xml create mode 100644 benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java create mode 100644 benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java create mode 100644 
benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java create mode 100644 benchmarks/wrappers/azure/java/pom.xml create mode 100644 benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java create mode 100644 benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java create mode 100644 benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java create mode 100644 dockerfiles/aws/java/Dockerfile.function create mode 100644 dockerfiles/azure/java/Dockerfile.build diff --git a/benchmarks/000.microbenchmarks/010.sleep/config.json b/benchmarks/000.microbenchmarks/010.sleep/config.json index 93ce2f56..ce9e1e32 100644 --- a/benchmarks/000.microbenchmarks/010.sleep/config.json +++ b/benchmarks/000.microbenchmarks/010.sleep/config.json @@ -1,6 +1,6 @@ { "timeout": 120, "memory": 128, - "languages": ["python", "nodejs"], + "languages": ["python", "nodejs", "java"], "modules": [] } diff --git a/benchmarks/000.microbenchmarks/010.sleep/java/src/main/java/function/Function.java b/benchmarks/000.microbenchmarks/010.sleep/java/src/main/java/function/Function.java new file mode 100644 index 00000000..acd2b8f3 --- /dev/null +++ b/benchmarks/000.microbenchmarks/010.sleep/java/src/main/java/function/Function.java @@ -0,0 +1,33 @@ +package function; + +import java.util.HashMap; +import java.util.Map; + +public class Function { + + public Map handler(Map event) { + double sleepSeconds = parseSeconds(event.get("sleep")); + try { + Thread.sleep((long) (sleepSeconds * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + Map result = new HashMap<>(); + result.put("result", sleepSeconds); + return result; + } + + private double parseSeconds(Object value) { + if (value instanceof Number) { + return ((Number) value).doubleValue(); + } + if (value instanceof String) { + try { + return Double.parseDouble((String) value); + } catch (NumberFormatException ignored) { + 
return 0; + } + } + return 0; + } +} diff --git a/benchmarks/100.webapps/110.dynamic-html/config.json b/benchmarks/100.webapps/110.dynamic-html/config.json index 25254c24..7e317037 100644 --- a/benchmarks/100.webapps/110.dynamic-html/config.json +++ b/benchmarks/100.webapps/110.dynamic-html/config.json @@ -1,6 +1,6 @@ { "timeout": 10, "memory": 128, - "languages": ["python", "nodejs"], + "languages": ["python", "nodejs", "java"], "modules": [] } diff --git a/benchmarks/100.webapps/110.dynamic-html/java/init.sh b/benchmarks/100.webapps/110.dynamic-html/java/init.sh new file mode 100755 index 00000000..b2657429 --- /dev/null +++ b/benchmarks/100.webapps/110.dynamic-html/java/init.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +OUTPUT_DIR=$1 + +# Copy templates directory to the output directory +cp -r templates "$OUTPUT_DIR/" diff --git a/benchmarks/100.webapps/110.dynamic-html/java/pom.xml b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml new file mode 100644 index 00000000..fb7e685e --- /dev/null +++ b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml @@ -0,0 +1,73 @@ + + + 4.0.0 + function + dynamic-html + 1.0 + + 17 + 17 + UTF-8 + + + + + com.github.spullara.mustache.java + compiler + 0.9.10 + + + + com.microsoft.azure.functions + azure-functions-java-library + 3.0.0 + + + + com.fasterxml.jackson.core + jackson-databind + 2.17.1 + + + + + + ${project.basedir}/templates + templates + + **/*.html + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.1 + + + package + + shade + + + false + + + *:* + + META-INF/*.SF + META-INF/*.RSA + META-INF/*.DSA + + + + + + + + + + diff --git a/benchmarks/100.webapps/110.dynamic-html/java/src/main/java/function/Function.java b/benchmarks/100.webapps/110.dynamic-html/java/src/main/java/function/Function.java new file mode 100644 index 00000000..20b38f47 --- /dev/null +++ b/benchmarks/100.webapps/110.dynamic-html/java/src/main/java/function/Function.java @@ -0,0 +1,100 @@ +package function; + +import 
com.github.mustachejava.DefaultMustacheFactory; +import com.github.mustachejava.Mustache; +import com.github.mustachejava.MustacheFactory; + +import java.io.*; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; + +public class Function { + + private static final DateTimeFormatter DATE_FORMATTER = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + + public Map handler(Map event) { + try { + // Get input parameters + String username = (String) event.getOrDefault("username", "Guest"); + int randomLen = parseRandomLen(event.get("random_len")); + + // Generate random numbers + List randomNumbers = generateRandomNumbers(randomLen); + + // Get current time + String currentTime = LocalDateTime.now().format(DATE_FORMATTER); + + // Prepare template data + Map templateData = new HashMap<>(); + templateData.put("username", username); + templateData.put("cur_time", currentTime); + templateData.put("random_numbers", randomNumbers); + + // Render HTML + String html = renderTemplate(templateData); + + // Return result + Map result = new HashMap<>(); + result.put("result", html); + return result; + + } catch (Exception e) { + // Return error as result to avoid crashing + Map result = new HashMap<>(); + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + result.put("result", "

Error

" + 
+                      sw.toString() + "
"); + return result; + } + } + + private int parseRandomLen(Object value) { + if (value instanceof Number) { + return ((Number) value).intValue(); + } + if (value instanceof String) { + try { + return Integer.parseInt((String) value); + } catch (NumberFormatException e) { + return 10; // default + } + } + return 10; // default + } + + private List generateRandomNumbers(int count) { + Random random = new Random(); + List numbers = new ArrayList<>(count); + for (int i = 0; i < count; i++) { + numbers.add(random.nextInt(1000000)); + } + return numbers; + } + + private String renderTemplate(Map data) throws Exception { + // Try to load template from classpath + InputStream templateStream = getClass().getClassLoader() + .getResourceAsStream("templates/template.html"); + + if (templateStream == null) { + throw new IOException("Template not found in classpath"); + } + + // Create Mustache factory and compile template + MustacheFactory mf = new DefaultMustacheFactory(); + Mustache mustache; + + try (InputStreamReader reader = new InputStreamReader(templateStream)) { + mustache = mf.compile(reader, "template"); + } + + // Render template + StringWriter writer = new StringWriter(); + mustache.execute(writer, data).flush(); + return writer.toString(); + } +} diff --git a/benchmarks/100.webapps/110.dynamic-html/java/templates/template.html b/benchmarks/100.webapps/110.dynamic-html/java/templates/template.html new file mode 100644 index 00000000..46199563 --- /dev/null +++ b/benchmarks/100.webapps/110.dynamic-html/java/templates/template.html @@ -0,0 +1,26 @@ + + + + Randomly generated data. + + + + + +
+

Welcome {{username}}!

+

Data generated at: {{cur_time}}!

+

Requested random numbers:

+
    + {{#random_numbers}} +
  • {{.}}
  • + {{/random_numbers}} +
+
+ + diff --git a/benchmarks/wrappers/aws/java/pom.xml b/benchmarks/wrappers/aws/java/pom.xml new file mode 100644 index 00000000..f4d08321 --- /dev/null +++ b/benchmarks/wrappers/aws/java/pom.xml @@ -0,0 +1,55 @@ + + 4.0.0 + org.serverlessbench + function + 1.0.0 + + 17 + 17 + + + + com.amazonaws + aws-lambda-java-core + 1.2.3 + + + com.fasterxml.jackson.core + jackson-databind + 2.17.1 + + + + function + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.1 + + + package + + shade + + + false + + + *:* + + META-INF/*.SF + META-INF/*.RSA + META-INF/*.DSA + + + + + + + + + + diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java new file mode 100644 index 00000000..e7cb2e01 --- /dev/null +++ b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java @@ -0,0 +1,35 @@ +package org.serverlessbench; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicBoolean; + +final class ColdStartTracker { + + private static final AtomicBoolean COLD = new AtomicBoolean(true); + private static final Path MARKER = Path.of("/tmp/cold_run"); + + private ColdStartTracker() {} + + static boolean isCold() { + if (Files.exists(MARKER)) { + COLD.set(false); + return false; + } + boolean first = COLD.getAndSet(false); + if (first) { + try { + Files.writeString( + MARKER, + UUID.randomUUID().toString().substring(0, 8), + StandardCharsets.UTF_8); + } catch (IOException ignored) { + // best-effort marker write + } + } + return first; + } +} diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java new file mode 100644 index 00000000..7d9c8357 --- /dev/null +++ 
b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java @@ -0,0 +1,41 @@ +package org.serverlessbench; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.Map; + +final class FunctionInvoker { + + private static final String DEFAULT_CLASS = "function.Function"; + private static final String DEFAULT_METHOD = "handler"; + + private FunctionInvoker() {} + + static Map invoke(Map input) { + try { + Class fnClass = Class.forName(DEFAULT_CLASS); + Object instance = fnClass.getDeclaredConstructor().newInstance(); + Method method = fnClass.getMethod(DEFAULT_METHOD, Map.class); + Object result = method.invoke(instance, input); + if (result instanceof Map) { + @SuppressWarnings("unchecked") + Map casted = (Map) result; + return casted; + } + } catch (ClassNotFoundException e) { + return defaultResponse("Function implementation not found"); + } catch (NoSuchMethodException e) { + return defaultResponse("Function.handler(Map) missing"); + } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { + return defaultResponse("Failed to invoke function: " + e.getMessage()); + } + return defaultResponse("Function returned unsupported type"); + } + + private static Map defaultResponse(String message) { + Map out = new HashMap<>(); + out.put("output", message); + return out; + } +} diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java new file mode 100644 index 00000000..5c1781e6 --- /dev/null +++ b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java @@ -0,0 +1,49 @@ +package org.serverlessbench; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.util.HashMap; +import 
java.util.Map; + +public class Handler implements RequestHandler, Map> { + + private static final ObjectMapper MAPPER = new ObjectMapper(); + + @Override + public Map handleRequest(Map event, Context context) { + long beginNs = System.nanoTime(); + Map normalized = normalize(event); + Map result = FunctionInvoker.invoke(normalized); + long endNs = System.nanoTime(); + + Map body = new HashMap<>(); + body.put("begin", beginNs / 1_000_000_000.0); + body.put("end", endNs / 1_000_000_000.0); + body.put("compute_time", (endNs - beginNs) / 1_000.0); + body.put("results_time", 0); + body.put("result", result); + body.put("is_cold", ColdStartTracker.isCold()); + body.put("request_id", context != null ? context.getAwsRequestId() : ""); + + return body; + } + + private Map normalize(Map event) { + if (event == null) { + return new HashMap<>(); + } + Object body = event.get("body"); + if (body instanceof String) { + try { + @SuppressWarnings("unchecked") + Map parsed = MAPPER.readValue((String) body, Map.class); + return parsed; + } catch (Exception ignored) { + // fall back to original event + } + } + return new HashMap<>(event); + } +} diff --git a/benchmarks/wrappers/azure/java/pom.xml b/benchmarks/wrappers/azure/java/pom.xml new file mode 100644 index 00000000..195df616 --- /dev/null +++ b/benchmarks/wrappers/azure/java/pom.xml @@ -0,0 +1,71 @@ + + 4.0.0 + org.serverlessbench + function + 1.0.0 + + 17 + 17 + + + + com.microsoft.azure.functions + azure-functions-java-library + 3.0.0 + + + com.fasterxml.jackson.core + jackson-databind + 2.17.1 + + + + function + + + com.microsoft.azure + azure-functions-maven-plugin + 1.31.0 + + unused + unused + westeurope + + linux + 17 + + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.1 + + + package + + shade + + + false + + + *:* + + META-INF/*.SF + META-INF/*.RSA + META-INF/*.DSA + + + + + + + + + + diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java 
b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java new file mode 100644 index 00000000..fbedaa20 --- /dev/null +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java @@ -0,0 +1,33 @@ +package org.serverlessbench; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicBoolean; + +final class ColdStartTracker { + + private static final AtomicBoolean WORKER_COLD = new AtomicBoolean(true); + private static final Path MARKER = Path.of("/tmp/cold_run"); + + private ColdStartTracker() {} + + static boolean isCold() { + if (Files.exists(MARKER)) { + return false; + } + try { + Files.writeString( + MARKER, UUID.randomUUID().toString().substring(0, 8), StandardCharsets.UTF_8); + } catch (IOException ignored) { + // best-effort marker write + } + return true; + } + + static boolean isWorkerCold() { + return WORKER_COLD.getAndSet(false); + } +} diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java new file mode 100644 index 00000000..7d9c8357 --- /dev/null +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java @@ -0,0 +1,41 @@ +package org.serverlessbench; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.Map; + +final class FunctionInvoker { + + private static final String DEFAULT_CLASS = "function.Function"; + private static final String DEFAULT_METHOD = "handler"; + + private FunctionInvoker() {} + + static Map invoke(Map input) { + try { + Class fnClass = Class.forName(DEFAULT_CLASS); + Object instance = fnClass.getDeclaredConstructor().newInstance(); + Method method = fnClass.getMethod(DEFAULT_METHOD, 
Map.class); + Object result = method.invoke(instance, input); + if (result instanceof Map) { + @SuppressWarnings("unchecked") + Map casted = (Map) result; + return casted; + } + } catch (ClassNotFoundException e) { + return defaultResponse("Function implementation not found"); + } catch (NoSuchMethodException e) { + return defaultResponse("Function.handler(Map) missing"); + } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { + return defaultResponse("Failed to invoke function: " + e.getMessage()); + } + return defaultResponse("Function returned unsupported type"); + } + + private static Map defaultResponse(String message) { + Map out = new HashMap<>(); + out.put("output", message); + return out; + } +} diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java new file mode 100644 index 00000000..8f9b3e1c --- /dev/null +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java @@ -0,0 +1,79 @@ +package org.serverlessbench; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.microsoft.azure.functions.*; +import com.microsoft.azure.functions.annotation.AuthorizationLevel; +import com.microsoft.azure.functions.annotation.FunctionName; +import com.microsoft.azure.functions.annotation.HttpTrigger; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +public class Handler { + + private static final ObjectMapper MAPPER = new ObjectMapper(); + + @FunctionName("handler") + public HttpResponseMessage handleRequest( + @HttpTrigger( + name = "req", + methods = {HttpMethod.GET, HttpMethod.POST}, + authLevel = AuthorizationLevel.ANONYMOUS) + final HttpRequestMessage> request, + final ExecutionContext context) { + + long beginNs = System.nanoTime(); + Map normalized = normalizeRequest(request); + Map result = 
FunctionInvoker.invoke(normalized); + long endNs = System.nanoTime(); + + Map body = new HashMap<>(); + body.put("begin", beginNs / 1_000_000_000.0); + body.put("end", endNs / 1_000_000_000.0); + body.put("compute_time", (endNs - beginNs) / 1_000.0); + body.put("results_time", 0); + body.put("result", result); + body.put("is_cold", ColdStartTracker.isCold()); + body.put("is_cold_worker", ColdStartTracker.isWorkerCold()); + body.put("request_id", context != null ? context.getInvocationId() : ""); + + String coldStartVar = System.getenv("cold_start"); + if (coldStartVar != null) { + body.put("cold_start_var", coldStartVar); + } + + String json = toJson(body); + return request + .createResponseBuilder(HttpStatus.OK) + .header("Content-Type", "application/json") + .body(json) + .build(); + } + + private Map normalizeRequest(HttpRequestMessage> request) { + if (request == null) { + return new HashMap<>(); + } + Optional body = request.getBody(); + if (body.isPresent()) { + try { + @SuppressWarnings("unchecked") + Map parsed = MAPPER.readValue(body.get(), Map.class); + return parsed; + } catch (IOException ignored) { + // ignore and continue + } + } + return new HashMap<>(request.getQueryParameters()); + } + + private String toJson(Map payload) { + try { + return MAPPER.writeValueAsString(payload); + } catch (IOException e) { + return "{}"; + } + } +} diff --git a/config/systems.json b/config/systems.json index e4db6590..9cd6e2ab 100644 --- a/config/systems.json +++ b/config/systems.json @@ -125,9 +125,11 @@ "java": { "base_images": { "x64": { + "17": "public.ecr.aws/lambda/java:17", "11": "amazon/aws-lambda-java:11" }, "arm64": { + "17": "public.ecr.aws/lambda/java:17", "11": "amazon/aws-lambda-java:11" } }, @@ -136,14 +138,13 @@ ], "deployment": { "files": [ - "Handler.java" + "pom.xml", + "src" ], - "packages": { - "com.amazonaws:aws-lambda-java-core": "1.2.3", - "com.fasterxml.jackson.core:jackson-databind": "2.15.2" - } + "packages": {}, + "module_packages": {} } - 
} + } }, "architecture": ["x64", "arm64"], "deployments": ["package"] @@ -203,6 +204,25 @@ "uuid": "3.4.0" } } + }, + "java": { + "base_images": { + "x64": { + "17": "mcr.microsoft.com/azure-functions/java:4-java17" + } + }, + "images": [ + "build" + ], + "username": "docker_user", + "deployment": { + "files": [ + "pom.xml", + "src" + ], + "packages": {}, + "module_packages": {} + } } }, "images": { diff --git a/dockerfiles/aws/java/Dockerfile.build b/dockerfiles/aws/java/Dockerfile.build index 2b2a09e0..2990a296 100644 --- a/dockerfiles/aws/java/Dockerfile.build +++ b/dockerfiles/aws/java/Dockerfile.build @@ -3,9 +3,8 @@ FROM ${BASE_IMAGE} ARG VERSION ENV JAVA_VERSION=${VERSION} - -# useradd, groupmod + maven -RUN yum install -y shadow-utils maven +# useradd, groupmod, build tooling +RUN yum install -y shadow-utils unzip tar gzip maven zip ENV GOSU_VERSION 1.14 # https://github.com/tianon/gosu/releases/tag/1.14 # key https://keys.openpgp.org/search?q=tianon%40debian.org @@ -14,9 +13,10 @@ RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/dow RUN mkdir -p /sebs/ COPY dockerfiles/java_installer.sh /sebs/installer.sh COPY dockerfiles/entrypoint.sh /sebs/entrypoint.sh -RUN chmod +x /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh /sebs/installer.sh # useradd and groupmod is installed in /usr/sbin which is not in PATH ENV PATH=/usr/sbin:$PATH +ENV SCRIPT_FILE=/mnt/function/package.sh CMD /bin/bash /sebs/installer.sh -ENTRYPOINT ["/sebs/entrypoint.sh"] \ No newline at end of file +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/dockerfiles/aws/java/Dockerfile.function b/dockerfiles/aws/java/Dockerfile.function new file mode 100644 index 00000000..07ae2f1c --- /dev/null +++ b/dockerfiles/aws/java/Dockerfile.function @@ -0,0 +1,16 @@ +ARG BASE_IMAGE +FROM $BASE_IMAGE +ARG VERSION +ENV JAVA_VERSION=${VERSION} +ARG TARGET_ARCHITECTURE + +COPY . 
function/ +WORKDIR /function + +# Ensure packaged jar is present for the Lambda base image +RUN if [ -d "target" ] && ls target/*.jar >/dev/null 2>&1; then \ + cp target/*.jar function.jar; \ + fi \ + && test -f function.jar + +CMD ["org.serverlessbench.Handler::handleRequest"] diff --git a/dockerfiles/azure/java/Dockerfile.build b/dockerfiles/azure/java/Dockerfile.build new file mode 100644 index 00000000..6d7a3697 --- /dev/null +++ b/dockerfiles/azure/java/Dockerfile.build @@ -0,0 +1,18 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV JAVA_VERSION=${VERSION} + +RUN apt-get update && apt-get install -y gosu maven unzip zip \ + && apt-get clean + +RUN mkdir -p /sebs/ +COPY dockerfiles/java_installer.sh /sebs/installer.sh +COPY dockerfiles/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh /sebs/installer.sh + +# useradd and groupmod are in /usr/sbin which is not in PATH +ENV PATH=/usr/sbin:$PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/dockerfiles/java_installer.sh b/dockerfiles/java_installer.sh index 1cc221f3..2fd4fc1b 100644 --- a/dockerfiles/java_installer.sh +++ b/dockerfiles/java_installer.sh @@ -1,8 +1,19 @@ #!/bin/bash -cd /mnt/function +set -euo pipefail -mvn clean install +cd /mnt/function +if [[ -f "pom.xml" ]]; then + # Note: -q flag causes issues in Docker, removed for reliable builds + mvn -DskipTests package + if ls target/*.jar >/dev/null 2>&1; then + JAR_PATH=$(ls target/*.jar | head -n1) + cp "${JAR_PATH}" function.jar + fi +fi +if [[ -f "${SCRIPT_FILE:-}" ]]; then + /bin/bash "${SCRIPT_FILE}" . 
+fi diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index abea416e..ca187e60 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -134,19 +134,16 @@ def package_code( directory, language_name, language_version, architecture, benchmark, is_cached ) - if (language_name == 'java'): - - jar_path = os.path.join(directory, "target", "benchmark-1.0.jar") - bytes_size = os.path.getsize(jar_path) - - return ( - jar_path, - bytes_size, - container_uri, - ) - + if language_name == "java": + jar_path = os.path.join(directory, "function.jar") + if not os.path.exists(jar_path): + raise RuntimeError("function.jar missing. Ensure Java build produced the jar.") + package_dir = os.path.join(directory, "package") + os.makedirs(package_dir, exist_ok=True) + shutil.copy2(jar_path, os.path.join(package_dir, "function.jar")) + execute("zip -qu -r9 {}.zip .".format(benchmark), shell=True, cwd=package_dir) + benchmark_archive = "{}.zip".format(os.path.join(package_dir, benchmark)) else: - # so no need to add anything here CONFIG_FILES = { "python": ["handler.py", "requirements.txt", ".python_packages"], "nodejs": ["handler.js", "package.json", "node_modules"], @@ -163,17 +160,23 @@ def package_code( # create zip with hidden directory but without parent directory execute("zip -qu -r9 {}.zip * .".format(benchmark), shell=True, cwd=directory) benchmark_archive = "{}.zip".format(os.path.join(directory, benchmark)) - self.logging.info("Created {} archive".format(benchmark_archive)) + self.logging.info("Created {} archive".format(benchmark_archive)) - bytes_size = os.path.getsize(os.path.join(directory, benchmark_archive)) - mbytes = bytes_size / 1024.0 / 1024.0 - self.logging.info("Zip archive size {:2f} MB".format(mbytes)) + bytes_size = os.path.getsize(benchmark_archive) + mbytes = bytes_size / 1024.0 / 1024.0 + self.logging.info("Zip archive size {:2f} MB".format(mbytes)) - return ( - os.path.join(directory, "{}.zip".format(benchmark)), - bytes_size, - container_uri, - ) + return ( + 
benchmark_archive, + bytes_size, + container_uri, + ) + + def _default_handler(self, language: str) -> str: + + if language == "java": + return "org.serverlessbench.Handler::handleRequest" + return "handler.handler" def _map_architecture(self, architecture: str) -> str: @@ -267,10 +270,7 @@ def create_function( create_function_params["Runtime"] = "{}{}".format( language, self._map_language_runtime(language, language_runtime) ) - if language == "java": - create_function_params["Handler"] = "Handler::handleRequest" - else: - create_function_params["Handler"] = "handler.handler" + create_function_params["Handler"] = self._default_handler(language) create_function_params = { k: v for k, v in create_function_params.items() if v is not None diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index d848d724..c9aa0b6d 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -33,12 +33,23 @@ class Azure(System): _config: AzureConfig # runtime mapping - AZURE_RUNTIMES = {"python": "python", "nodejs": "node"} + AZURE_RUNTIMES = {"python": "python", "nodejs": "node", "java": "java"} @staticmethod def name(): return "azure" + @staticmethod + def _normalize_runtime_version(language: str, version: str) -> str: + """ + Azure Functions Java expects versions with a minor component + (e.g. 17.0 instead of 17). Other languages can keep the version + as-is. 
+ """ + if language == "java" and re.match(r"^\d+$", str(version)): + return f"{version}.0" + return version + @property def config(self) -> AzureConfig: return self._config @@ -133,36 +144,81 @@ def package_code( # In previous step we ran a Docker container which installed packages # Python packages are in .python_packages because this is expected by Azure - EXEC_FILES = {"python": "handler.py", "nodejs": "handler.js"} + EXEC_FILES = {"python": "handler.py", "nodejs": "handler.js", "java": "../function.jar"} CONFIG_FILES = { "python": ["requirements.txt", ".python_packages"], "nodejs": ["package.json", "node_modules"], + "java": ["function.jar"], } package_config = CONFIG_FILES[language_name] handler_dir = os.path.join(directory, "handler") os.makedirs(handler_dir) + + # For Java, create lib directory for JARs and exclude build artifacts + if language_name == "java": + lib_dir = os.path.join(directory, "lib") + os.makedirs(lib_dir, exist_ok=True) + # Move function.jar to lib directory + if os.path.exists(os.path.join(directory, "function.jar")): + shutil.move(os.path.join(directory, "function.jar"), os.path.join(lib_dir, "function.jar")) + # For Java, we want to keep lib and exclude source files/build artifacts + package_config = ["lib", "src", "pom.xml", "target", ".mvn", "mvnw", "mvnw.cmd"] + # move all files to 'handler' except package config for f in os.listdir(directory): if f not in package_config: source_file = os.path.join(directory, f) shutil.move(source_file, handler_dir) + + # For Java, clean up build artifacts that we don't want to deploy + if language_name == "java": + for artifact in ["src", "pom.xml", "target", ".mvn", "mvnw", "mvnw.cmd"]: + artifact_path = os.path.join(directory, artifact) + if os.path.exists(artifact_path): + if os.path.isdir(artifact_path): + shutil.rmtree(artifact_path) + else: + os.remove(artifact_path) # generate function.json # TODO: extension to other triggers than HTTP - default_function_json = { - "scriptFile": 
EXEC_FILES[language_name], - "bindings": [ - { - "authLevel": "anonymous", - "type": "httpTrigger", - "direction": "in", - "name": "req", - "methods": ["get", "post"], - }, - {"type": "http", "direction": "out", "name": "$return"}, - ], - } + if language_name == "java": + # Java Azure Functions - For annotation-based functions, function.json + # should include scriptFile and entryPoint + # The @FunctionName annotation determines the function name + default_function_json = { + "scriptFile": "../lib/function.jar", + "entryPoint": "org.serverlessbench.Handler.handleRequest", + "bindings": [ + { + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": ["get", "post"], + "authLevel": "anonymous" + }, + { + "type": "http", + "direction": "out", + "name": "$return" + } + ] + } + else: + default_function_json = { + "scriptFile": EXEC_FILES[language_name], + "bindings": [ + { + "authLevel": "anonymous", + "type": "httpTrigger", + "direction": "in", + "name": "req", + "methods": ["get", "post"], + }, + {"type": "http", "direction": "out", "name": "$return"}, + ], + } json_out = os.path.join(directory, "handler", "function.json") json.dump(default_function_json, open(json_out, "w"), indent=2) @@ -418,7 +474,13 @@ def create_function( raise NotImplementedError("Container deployment is not supported in Azure") language = code_package.language_name - language_runtime = code_package.language_version + language_runtime = self._normalize_runtime_version( + language, code_package.language_version + ) + # ensure string form is passed to Azure CLI + language_runtime = str(language_runtime) + if language == "java" and "." 
not in language_runtime: + language_runtime = f"{language_runtime}.0" resource_group = self.config.resources.resource_group(self.cli_instance) region = self.config.region function_cfg = FunctionConfig.from_benchmark(code_package) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 1374b91f..c164bad5 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -253,10 +253,13 @@ def hash_directory(directory: str, deployment: str, language: str): FILES = { "python": ["*.py", "requirements.txt*"], "nodejs": ["*.js", "package.json"], - # Use recursive Java scan since *.java files are located in subfolders. - "java": ["**/*.java", "pom.xml"], + "java": [], + } + WRAPPERS = { + "python": ["*.py"], + "nodejs": ["*.js"], + "java": ["src", "pom.xml"], } - WRAPPERS = {"python": "*.py", "nodejs": "*.js", "java": "*.java"} NON_LANG_FILES = ["*.sh", "*.json"] selected_files = FILES[language] + NON_LANG_FILES for file_type in selected_files: @@ -266,13 +269,21 @@ def hash_directory(directory: str, deployment: str, language: str): with open(path, "rb") as opened_file: hash_sum.update(opened_file.read()) # wrappers - wrappers = project_absolute_path( - "benchmarks", "wrappers", deployment, language, WRAPPERS[language] - ) - for f in glob.glob(wrappers): - path = os.path.join(directory, f) - with open(path, "rb") as opened_file: - hash_sum.update(opened_file.read()) + wrapper_patterns = WRAPPERS[language] + for pattern in wrapper_patterns: + wrappers = project_absolute_path( + "benchmarks", "wrappers", deployment, language, pattern + ) + for f in glob.glob(wrappers): + if os.path.isdir(f): + for root, _, files in os.walk(f): + for file in files: + path = os.path.join(root, file) + with open(path, "rb") as opened_file: + hash_sum.update(opened_file.read()) + else: + with open(f, "rb") as opened_file: + hash_sum.update(opened_file.read()) return hash_sum.hexdigest() def serialize(self) -> dict: @@ -322,23 +333,15 @@ def copy_code(self, output_dir): FILES = { "python": ["*.py", 
"requirements.txt*"], "nodejs": ["*.js", "package.json"], - "java": ["pom.xml"], + "java": [], } path = os.path.join(self.benchmark_path, self.language_name) - + if self.language_name == "java": + shutil.copytree(path, output_dir, dirs_exist_ok=True) + return for file_type in FILES[self.language_name]: for f in glob.glob(os.path.join(path, file_type)): shutil.copy2(os.path.join(path, f), output_dir) - - # copy src folder of java (java benchmarks are maven project and need directories) - if self.language == Language.JAVA: - output_src_dir = os.path.join(output_dir, "src") - - if os.path.exists(output_src_dir): - # If src dir in output exist, remove the directory and all its contents - shutil.rmtree(output_src_dir) - #To have contents of src directory in the direcory named src located in output - shutil.copytree(os.path.join(path, "src"), output_src_dir) # support node.js benchmarks with language specific packages nodejs_package_json = os.path.join(path, f"package.json.{self.language_version}") @@ -399,15 +402,13 @@ def add_deployment_files(self, output_dir): final_path = output_dir - # For Java, use Maven structure: put handler files in src/main/java/ - if self.language_name == 'java': - final_path = os.path.join(output_dir, 'src', 'main', 'java') - os.makedirs(final_path, exist_ok=True) # make sure the path exists - for file in handlers: - shutil.copy2(file, final_path) - - + destination = os.path.join(output_dir, os.path.basename(file)) + if os.path.isdir(file): + shutil.copytree(file, destination, dirs_exist_ok=True) + else: + if not os.path.exists(destination): + shutil.copy2(file, destination) def add_deployment_package_python(self, output_dir): @@ -492,7 +493,8 @@ def add_deployment_package(self, output_dir): elif self.language == Language.NODEJS: self.add_deployment_package_nodejs(output_dir) elif self.language == Language.JAVA: - self.add_deployment_package_java(output_dir) + # Java dependencies are handled by Maven in the wrapper + return else: raise 
NotImplementedError @@ -570,7 +572,7 @@ def ensure_image(name: str) -> None: } # run Docker container to install packages - PACKAGE_FILES = {"python": "requirements.txt", "nodejs": "package.json", "java" : "pom.xml"} + PACKAGE_FILES = {"python": "requirements.txt", "nodejs": "package.json", "java": "pom.xml"} file = os.path.join(output_dir, PACKAGE_FILES[self.language_name]) if os.path.exists(file): try: diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py index 5336fb48..448c78b7 100755 --- a/tools/build_docker_images.py +++ b/tools/build_docker_images.py @@ -13,7 +13,14 @@ "--deployment", default=None, choices=["local", "aws", "azure", "gcp"], action="store" ) parser.add_argument("--type", default=None, choices=["build", "run", "manage"], action="store") -parser.add_argument("--language", default=None, choices=["python", "nodejs"], action="store") +parser.add_argument( + "--language", default=None, choices=["python", "nodejs", "java"], action="store" +) +parser.add_argument( + "--platform", + default=None, + help="Optional Docker platform (e.g., linux/amd64) to override host architecture.", +) parser.add_argument("--language-version", default=None, type=str, action="store") args = parser.parse_args() config = json.load(open(os.path.join(PROJECT_DIR, "config", "systems.json"), "r")) @@ -51,8 +58,16 @@ def build(image_type, system, language=None, version=None, version_name=None): target, PROJECT_DIR, dockerfile, buildargs ) ) + platform_arg = args.platform or os.environ.get("DOCKER_DEFAULT_PLATFORM") + try: - client.images.build(path=PROJECT_DIR, dockerfile=dockerfile, buildargs=buildargs, tag=target) + client.images.build( + path=PROJECT_DIR, + dockerfile=dockerfile, + buildargs=buildargs, + tag=target, + platform=platform_arg, + ) except docker.errors.BuildError as exc: print("Error! 
Build failed!") print(exc) From 8ed60e1b0571c28ab61ea5db73d4743c19f88e40 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 Mar 2026 23:07:59 +0100 Subject: [PATCH 30/82] [java] Ensure that failed invocations are not reported as success --- .../main/java/org/serverlessbench/FunctionInvoker.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java index 7d9c8357..9c502f86 100644 --- a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java +++ b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java @@ -24,13 +24,13 @@ static Map invoke(Map input) { return casted; } } catch (ClassNotFoundException e) { - return defaultResponse("Function implementation not found"); + throw new RuntimeException("Function implementation not found"); } catch (NoSuchMethodException e) { - return defaultResponse("Function.handler(Map) missing"); + throw new RuntimeException("Function.handler(Map) missing"); } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { - return defaultResponse("Failed to invoke function: " + e.getMessage()); + throw new RuntimeException("Failed to invoke function: " + e.getMessage()); } - return defaultResponse("Function returned unsupported type"); + throw new RuntimeException("Function returned unsupported type"); } private static Map defaultResponse(String message) { From 528a1d9850f0a882c4380ef836b66a7f7b5477f8 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 Mar 2026 23:08:14 +0100 Subject: [PATCH 31/82] [aws][java] Upload the correct .jar directly --- sebs/aws/aws.py | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index ca187e60..21ed7a6c 100644 --- a/sebs/aws/aws.py +++ 
b/sebs/aws/aws.py @@ -135,14 +135,19 @@ def package_code( ) if language_name == "java": - jar_path = os.path.join(directory, "function.jar") + + jar_path = os.path.join(directory, "target", "benchmark-1.0.jar") + bytes_size = os.path.getsize(jar_path) + mbytes = bytes_size / 1024.0 / 1024.0 if not os.path.exists(jar_path): - raise RuntimeError("function.jar missing. Ensure Java build produced the jar.") - package_dir = os.path.join(directory, "package") - os.makedirs(package_dir, exist_ok=True) - shutil.copy2(jar_path, os.path.join(package_dir, "function.jar")) - execute("zip -qu -r9 {}.zip .".format(benchmark), shell=True, cwd=package_dir) - benchmark_archive = "{}.zip".format(os.path.join(package_dir, benchmark)) + raise RuntimeError( + f"Java artifact {jar_path} missing. Ensure Java build produced the jar." + ) + + self.logging.info(f"Created {jar_path} archive") + self.logging.info("Zip archive size {:2f} MB".format(mbytes)) + + return (jar_path, bytes_size, container_uri) else: CONFIG_FILES = { "python": ["handler.py", "requirements.txt", ".python_packages"], @@ -160,17 +165,18 @@ def package_code( # create zip with hidden directory but without parent directory execute("zip -qu -r9 {}.zip * .".format(benchmark), shell=True, cwd=directory) benchmark_archive = "{}.zip".format(os.path.join(directory, benchmark)) - self.logging.info("Created {} archive".format(benchmark_archive)) - bytes_size = os.path.getsize(benchmark_archive) - mbytes = bytes_size / 1024.0 / 1024.0 - self.logging.info("Zip archive size {:2f} MB".format(mbytes)) + self.logging.info("Created {} archive".format(benchmark_archive)) - return ( - benchmark_archive, - bytes_size, - container_uri, - ) + bytes_size = os.path.getsize(benchmark_archive) + mbytes = bytes_size / 1024.0 / 1024.0 + self.logging.info("Zip archive size {:2f} MB".format(mbytes)) + + return ( + benchmark_archive, + bytes_size, + container_uri, + ) def _default_handler(self, language: str) -> str: From 
ebca3404e3a6825759252e19d558ef49c08719e4 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 11:36:30 +0100 Subject: [PATCH 32/82] [system][java] Major fixes to Java handling and exporting build errors from build container We didn't copy the entire code in a nested fashion, and we didn't fill the pom.xml file with system dependencies --- sebs/benchmark.py | 133 +++++++++++++++++++++++++++------------------- 1 file changed, 79 insertions(+), 54 deletions(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index c164bad5..b90b6e03 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -4,7 +4,6 @@ import subprocess import os import shutil -import subprocess from abc import abstractmethod from typing import Any, Callable, Dict, List, Optional, Tuple @@ -25,7 +24,11 @@ class BenchmarkConfig: def __init__( - self, timeout: int, memory: int, languages: List["Language"], modules: List[BenchmarkModule] + self, + timeout: int, + memory: int, + languages: List["Language"], + modules: List[BenchmarkModule], ): self._timeout = timeout self._memory = memory @@ -253,18 +256,18 @@ def hash_directory(directory: str, deployment: str, language: str): FILES = { "python": ["*.py", "requirements.txt*"], "nodejs": ["*.js", "package.json"], - "java": [], + "java": ["*.java", "pom.xml"], } WRAPPERS = { "python": ["*.py"], "nodejs": ["*.js"], - "java": ["src", "pom.xml"], + "java": ["src"], } NON_LANG_FILES = ["*.sh", "*.json"] selected_files = FILES[language] + NON_LANG_FILES for file_type in selected_files: - for f in glob.glob(os.path.join(directory, file_type), recursive=True): - if os.path.isfile(f): + for f in glob.glob(os.path.join(directory, "**", file_type), recursive=True): + if os.path.isfile(f): path = os.path.join(directory, f) with open(path, "rb") as opened_file: hash_sum.update(opened_file.read()) @@ -336,40 +339,19 @@ def copy_code(self, output_dir): "java": [], } path = os.path.join(self.benchmark_path, self.language_name) - if self.language_name == 
"java": + if self.language == Language.JAVA: + # In Java, we copy the entire nested directory. shutil.copytree(path, output_dir, dirs_exist_ok=True) return for file_type in FILES[self.language_name]: for f in glob.glob(os.path.join(path, file_type)): shutil.copy2(os.path.join(path, f), output_dir) - + # support node.js benchmarks with language specific packages nodejs_package_json = os.path.join(path, f"package.json.{self.language_version}") if os.path.exists(nodejs_package_json): shutil.copy2(nodejs_package_json, os.path.join(output_dir, "package.json")) - #This is for making jar file and add it to docker directory - def add_java_output(self, code_dir): - from sebs.faas.function import Language - if self.language == Language.JAVA: - - # Step 1: Move Main.java o src directory - src_dir = os.path.join(code_dir, "src", "main", "java") - if os.path.exists(code_dir): - main_java_path = os.path.join(code_dir, "Main.java") - if os.path.exists(main_java_path): - shutil.move(main_java_path, src_dir) - - # Step 2: Run mvn clean install - try: - # Navigate to the code directory where the pom.xml file is located - subprocess.run(['mvn', 'clean', 'install'], cwd=code_dir, check=True, text=True, capture_output=True) - print("Maven build successful!") - except subprocess.CalledProcessError as e: - print(f"Error during Maven build:\n{e.stdout}\n{e.stderr}") - return - - def add_benchmark_data(self, output_dir): cmd = "/bin/bash {benchmark_path}/init.sh {output_dir} false {architecture}" paths = [ @@ -400,8 +382,6 @@ def add_deployment_files(self, output_dir): ) ] - final_path = output_dir - for file in handlers: destination = os.path.join(output_dir, os.path.basename(file)) if os.path.isdir(file): @@ -451,7 +431,7 @@ def add_deployment_package_nodejs(self, output_dir): with open(package_config, "w") as package_file: json.dump(package_json, package_file, indent=2) - # Dependencies in system.json are in "group:artifact": version format; + # Dependencies in system.json are in 
"group:artifact": version format; # this function converts them to proper Maven blocks. def format_maven_dependency(self, group_artifact: str, version: str) -> str: group_id, artifact_id = group_artifact.split(":") @@ -461,26 +441,34 @@ def format_maven_dependency(self, group_artifact: str, version: str) -> str: {artifact_id} {version} """ - + def add_deployment_package_java(self, output_dir): - + pom_path = os.path.join(output_dir, "pom.xml") with open(pom_path, "r") as f: pom_content = f.read() - packages = self._system_config.deployment_packages(self._deployment_name, self.language_name) + packages = self._system_config.deployment_packages( + self._deployment_name, self.language_name + ) dependency_blocks = "" if len(packages): for key, val in packages.items(): dependency_name = key.strip('"').strip("'") dependency_version = val.strip('"').strip("'") - dependency_blocks += self.format_maven_dependency(dependency_name, dependency_version) + "\n" + dependency_blocks += ( + self.format_maven_dependency(dependency_name, dependency_version) + "\n" + ) if "" not in pom_content: - raise ValueError("pom.xml template is missing placeholder") + raise ValueError( + "pom.xml template is missing placeholder" + ) - pom_content = pom_content.replace("", dependency_blocks.strip()) + pom_content = pom_content.replace( + "", dependency_blocks.strip() + ) with open(pom_path, "w") as f: f.write(pom_content) @@ -493,8 +481,7 @@ def add_deployment_package(self, output_dir): elif self.language == Language.NODEJS: self.add_deployment_package_nodejs(output_dir) elif self.language == Language.JAVA: - # Java dependencies are handled by Maven in the wrapper - return + self.add_deployment_package_java(output_dir) else: raise NotImplementedError @@ -572,7 +559,11 @@ def ensure_image(name: str) -> None: } # run Docker container to install packages - PACKAGE_FILES = {"python": "requirements.txt", "nodejs": "package.json", "java": "pom.xml"} + PACKAGE_FILES = { + "python": "requirements.txt", + 
"nodejs": "package.json", + "java": "pom.xml", + } file = os.path.join(output_dir, PACKAGE_FILES[self.language_name]) if os.path.exists(file): try: @@ -588,7 +579,7 @@ def ensure_image(name: str) -> None: path=os.path.abspath(output_dir) ) ) - stdout = self._docker_client.containers.run( + container = self._docker_client.containers.run( "{}:{}".format(repo_name, image_name), volumes=volumes, environment={ @@ -599,10 +590,24 @@ def ensure_image(name: str) -> None: "PLATFORM": self._deployment_name.upper(), "TARGET_ARCHITECTURE": self._experiment_config._architecture, }, - remove=True, - stdout=True, - stderr=True, + remove=False, + detach=True, ) + try: + exit_code = container.wait() + stdout = container.logs() + if exit_code["StatusCode"] != 0: + error_log_path = os.path.join(output_dir, "error.log") + with open(error_log_path, "wb") as error_file: + error_file.write(stdout) + self.logging.error( + f"Build failed! Container exited with " + f"code {exit_code['StatusCode']}" + ) + self.logging.error(f"Logs saved to {error_log_path}") + raise RuntimeError("Package build failed!") + finally: + container.remove() # Hack to enable builds on platforms where Docker mounted volumes # are not supported. 
Example: CircleCI docker environment else: @@ -661,7 +666,7 @@ def ensure_image(name: str) -> None: self.logging.error("Package build failed!") self.logging.error(e) self.logging.error(f"Docker mount volumes: {volumes}") - raise e + raise e from None def recalculate_code_size(self): self._code_size = Benchmark.directory_size(self._output_dir) @@ -680,7 +685,12 @@ def build( "Using cached benchmark {} at {}".format(self.benchmark, self.code_location) ) if self.container_deployment: - return False, self.code_location, self.container_deployment, self.container_uri + return ( + False, + self.code_location, + self.container_deployment, + self.container_uri, + ) return False, self.code_location, self.container_deployment, "" @@ -701,7 +711,6 @@ def build( self.copy_code(self._output_dir) self.add_benchmark_data(self._output_dir) self.add_deployment_files(self._output_dir) -# self.add_java_output(self._output_dir) self.add_deployment_package(self._output_dir) self.install_dependencies(self._output_dir) @@ -732,7 +741,12 @@ def build( self._cache_client.add_code_package(self._deployment_name, self) self.query_cache() - return True, self._code_location, self._container_deployment, self._container_uri + return ( + True, + self._code_location, + self._container_deployment, + self._container_uri, + ) """ Locates benchmark input generator, inspect how many storage buckets @@ -745,9 +759,11 @@ def build( """ def prepare_input( - self, system_resources: SystemResources, size: str, replace_existing: bool = False + self, + system_resources: SystemResources, + size: str, + replace_existing: bool = False, ): - """ Handle object storage buckets. 
""" @@ -774,7 +790,10 @@ def prepare_input( if hasattr(self._benchmark_input_module, "allocate_nosql"): nosql_storage = system_resources.get_nosql_storage() - for name, table_properties in self._benchmark_input_module.allocate_nosql().items(): + for ( + name, + table_properties, + ) in self._benchmark_input_module.allocate_nosql().items(): nosql_storage.create_benchmark_tables( self._benchmark, name, @@ -791,7 +810,13 @@ def prepare_input( # storage.allocate_buckets(self.benchmark, buckets) # Get JSON and upload data as required by benchmark input_config = self._benchmark_input_module.generate_input( - self._benchmark_data_path, size, bucket, input, output, storage_func, nosql_func + self._benchmark_data_path, + size, + bucket, + input, + output, + storage_func, + nosql_func, ) # Cache only once we data is in the cloud. From 2974d2d03be21444dc39ff062e1c6471c7c501f2 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 11:36:59 +0100 Subject: [PATCH 33/82] [docker] Ensure that Maven package is always rebuilt --- dockerfiles/java_installer.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dockerfiles/java_installer.sh b/dockerfiles/java_installer.sh index 2fd4fc1b..0bc1d953 100644 --- a/dockerfiles/java_installer.sh +++ b/dockerfiles/java_installer.sh @@ -6,7 +6,7 @@ cd /mnt/function if [[ -f "pom.xml" ]]; then # Note: -q flag causes issues in Docker, removed for reliable builds - mvn -DskipTests package + mvn -DskipTests clean package if ls target/*.jar >/dev/null 2>&1; then JAR_PATH=$(ls target/*.jar | head -n1) From 21849d31b0bcff0452cb820271c41d62f40e15ed Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 11:37:48 +0100 Subject: [PATCH 34/82] [dev] Linting --- sebs/openwhisk/openwhisk.py | 38 ++++++++++++++++++------------------ tools/build_docker_images.py | 21 ++++++++++++++------ 2 files changed, 34 insertions(+), 25 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 
0dc61b75..bde916ab 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -110,7 +110,7 @@ def package_code( directory, language_name, language_version, architecture, benchmark, is_cached ) - if language_name != 'java': + if language_name != "java": # We deploy Minio config in code package since this depends on local # deployment - it cannnot be a part of Docker image CONFIG_FILES = { @@ -209,24 +209,24 @@ def create_function( code_package.architecture, ) run_arguments = [ - *self.get_wsk_cmd(), - "action", - "create", - func_name, - "--web", - "true", - "--docker", - docker_image, - "--memory", - str(code_package.benchmark_config.memory), - "--timeout", - str(code_package.benchmark_config.timeout * 1000), - *self.storage_arguments(), - code_package.code_location, - ] - if code_package.language_name == 'java': + *self.get_wsk_cmd(), + "action", + "create", + func_name, + "--web", + "true", + "--docker", + docker_image, + "--memory", + str(code_package.benchmark_config.memory), + "--timeout", + str(code_package.benchmark_config.timeout * 1000), + *self.storage_arguments(), + code_package.code_location, + ] + if code_package.language_name == "java": run_arguments.extend(["--main", "Main"]) - + subprocess.run( [ *self.get_wsk_cmd(), @@ -248,7 +248,7 @@ def create_function( stdout=subprocess.PIPE, check=True, ) - + function_cfg.docker_image = docker_image res = OpenWhiskFunction( func_name, code_package.benchmark, code_package.hash, function_cfg diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py index 448c78b7..992c8c43 100755 --- a/tools/build_docker_images.py +++ b/tools/build_docker_images.py @@ -10,9 +10,14 @@ parser = argparse.ArgumentParser(description="Run local app experiments.") parser.add_argument( - "--deployment", default=None, choices=["local", "aws", "azure", "gcp"], action="store" + "--deployment", + default=None, + choices=["local", "aws", "azure", "gcp"], + action="store", +) +parser.add_argument( + 
"--type", default=None, choices=["build", "run", "manage"], action="store" ) -parser.add_argument("--type", default=None, choices=["build", "run", "manage"], action="store") parser.add_argument( "--language", default=None, choices=["python", "nodejs", "java"], action="store" ) @@ -36,7 +41,9 @@ def build(image_type, system, language=None, version=None, version_name=None): msg += " with version *" + version + "*" print(msg) if language is not None: - dockerfile = os.path.join(DOCKER_DIR, system, language, f"Dockerfile.{image_type}") + dockerfile = os.path.join( + DOCKER_DIR, system, language, f"Dockerfile.{image_type}" + ) else: dockerfile = os.path.join(DOCKER_DIR, system, f"Dockerfile.{image_type}") target = f'{config["general"]["docker_repository"]}:{image_type}.{system}' @@ -73,8 +80,8 @@ def build(image_type, system, language=None, version=None, version_name=None): print(exc) print("Build log") for line in exc.build_log: - if 'stream' in line: - print(line['stream'].strip()) + if "stream" in line: + print(line["stream"].strip()) def build_language(system, language, language_config): @@ -105,7 +112,9 @@ def build_systems(system, system_config): print(f"Skipping manage image for {system}") else: if args.language: - build_language(system, args.language, system_config["languages"][args.language]) + build_language( + system, args.language, system_config["languages"][args.language] + ) else: for language, language_dict in system_config["languages"].items(): build_language(system, language, language_dict) From 7843b6d01236e7673f72693e7a8c84766e07e62f Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 11:40:24 +0100 Subject: [PATCH 35/82] [java] Standardizing benchmark structure --- .../600.java/601.hello-world/config.json | 4 +- benchmarks/600.java/601.hello-world/input.py | 4 +- .../600.java/601.hello-world/java/pom.xml | 2 +- .../{faas/App.java => function/Function.java} | 4 +- benchmarks/wrappers/aws/java/pom.xml | 55 ------------------- 5 files 
changed, 7 insertions(+), 62 deletions(-) rename benchmarks/600.java/601.hello-world/java/src/main/java/{faas/App.java => function/Function.java} (85%) delete mode 100644 benchmarks/wrappers/aws/java/pom.xml diff --git a/benchmarks/600.java/601.hello-world/config.json b/benchmarks/600.java/601.hello-world/config.json index e3d6f85f..89e10df5 100644 --- a/benchmarks/600.java/601.hello-world/config.json +++ b/benchmarks/600.java/601.hello-world/config.json @@ -1,6 +1,6 @@ { "timeout": 60, "memory": 256, - "languages": ["java"] + "languages": ["java"], + "modules": [] } - diff --git a/benchmarks/600.java/601.hello-world/input.py b/benchmarks/600.java/601.hello-world/input.py index 52536abf..76e0a845 100644 --- a/benchmarks/600.java/601.hello-world/input.py +++ b/benchmarks/600.java/601.hello-world/input.py @@ -8,6 +8,6 @@ def generate_input( input_paths, output_paths, upload_func, - nosql_func=None + nosql_func=None ): - return { } \ No newline at end of file + return { } diff --git a/benchmarks/600.java/601.hello-world/java/pom.xml b/benchmarks/600.java/601.hello-world/java/pom.xml index d504d9bc..9947e6bb 100644 --- a/benchmarks/600.java/601.hello-world/java/pom.xml +++ b/benchmarks/600.java/601.hello-world/java/pom.xml @@ -3,7 +3,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 - faas + function benchmark 1.0 diff --git a/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java b/benchmarks/600.java/601.hello-world/java/src/main/java/function/Function.java similarity index 85% rename from benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java rename to benchmarks/600.java/601.hello-world/java/src/main/java/function/Function.java index fe0b2096..3dc09b6a 100644 --- a/benchmarks/600.java/601.hello-world/java/src/main/java/faas/App.java +++ b/benchmarks/600.java/601.hello-world/java/src/main/java/function/Function.java @@ -1,8 +1,8 @@ -package faas; +package function; import 
java.util.HashMap; import java.util.Map; -public class App { +public class Function { public Map handler(Map input) { Map result = new HashMap<>(); diff --git a/benchmarks/wrappers/aws/java/pom.xml b/benchmarks/wrappers/aws/java/pom.xml deleted file mode 100644 index f4d08321..00000000 --- a/benchmarks/wrappers/aws/java/pom.xml +++ /dev/null @@ -1,55 +0,0 @@ - - 4.0.0 - org.serverlessbench - function - 1.0.0 - - 17 - 17 - - - - com.amazonaws - aws-lambda-java-core - 1.2.3 - - - com.fasterxml.jackson.core - jackson-databind - 2.17.1 - - - - function - - - org.apache.maven.plugins - maven-shade-plugin - 3.5.1 - - - package - - shade - - - false - - - *:* - - META-INF/*.SF - META-INF/*.RSA - META-INF/*.DSA - - - - - - - - - - From 14ad548d2fa63f3b810acadfe00d92c46067e769 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 12:18:35 +0100 Subject: [PATCH 36/82] [java][aws] Unify outputs and handlers --- benchmarks/wrappers/aws/java/Handler.java | 75 ------------------- .../org/serverlessbench/ColdStartTracker.java | 23 +++++- .../java/org/serverlessbench/Handler.java | 41 ++++++++-- 3 files changed, 55 insertions(+), 84 deletions(-) delete mode 100644 benchmarks/wrappers/aws/java/Handler.java diff --git a/benchmarks/wrappers/aws/java/Handler.java b/benchmarks/wrappers/aws/java/Handler.java deleted file mode 100644 index b51de44c..00000000 --- a/benchmarks/wrappers/aws/java/Handler.java +++ /dev/null @@ -1,75 +0,0 @@ -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.core.type.TypeReference; - -import faas.App; - -import java.io.File; -import java.io.IOException; -import java.time.Instant; -import java.util.HashMap; -import java.util.Map; - -public class Handler implements RequestHandler, String> { - private static final ObjectMapper mapper = new ObjectMapper(); - - @Override - public String handleRequest(Map 
event, Context context) { - - Map inputData = event; - - // Extract input if trigger is API Gateway (body is a string) - if (event.containsKey("body") && event.get("body") instanceof String) - try { - inputData = mapper.readValue((String) event.get("body"),new TypeReference>() {}); - } catch (IOException e) { - throw new RuntimeException("Failed to parse JSON body", e); - } - - App function = new App(); - - Instant begin = Instant.now(); - long start_nano = System.nanoTime(); - - Map functionOutput = function.handler(inputData); - - long end_nano = System.nanoTime(); - Instant end = Instant.now(); - - - long computeTime = end_nano - start_nano; - // Detect cold start - boolean isCold = false; - String fileName = "/tmp/cold_run"; - - File file = new File(fileName); - if (!file.exists()) { - isCold = true; - try { - file.createNewFile(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - // Convert to Unix timestamp in seconds.microseconds - String formattedBegin = String.format("%d.%06d", begin.getEpochSecond(), begin.getNano() / 1000); // Convert nanoseconds to microseconds - String formattedEnd = String.format("%d.%06d", end.getEpochSecond(), end.getNano() / 1000); - - - Map result = new HashMap<>(); - result.put("begin", formattedBegin); - result.put("end", formattedEnd); - result.put("request_id", context.getAwsRequestId()); - result.put("compute_time", computeTime); - result.put("is_cold", isCold); - result.put("result", functionOutput); - try { - return mapper.writeValueAsString(result); - } catch (IOException e) { - throw new RuntimeException("Failed to serialize result of benchmark to JSON in Wrapper", e); - } - - } -} diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java index e7cb2e01..06a6572f 100644 --- a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java +++ 
b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java @@ -11,6 +11,7 @@ final class ColdStartTracker { private static final AtomicBoolean COLD = new AtomicBoolean(true); private static final Path MARKER = Path.of("/tmp/cold_run"); + private static String containerId = null; private ColdStartTracker() {} @@ -22,14 +23,28 @@ static boolean isCold() { boolean first = COLD.getAndSet(false); if (first) { try { - Files.writeString( - MARKER, - UUID.randomUUID().toString().substring(0, 8), - StandardCharsets.UTF_8); + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); } catch (IOException ignored) { // best-effort marker write } } return first; } + + static String getContainerId() { + if (containerId == null) { + try { + if (Files.exists(MARKER)) { + containerId = Files.readString(MARKER, StandardCharsets.UTF_8); + } else { + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); + } + } catch (IOException e) { + containerId = UUID.randomUUID().toString().substring(0, 8); + } + } + return containerId; + } } diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java index 5c1781e6..3cf9b84a 100644 --- a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java @@ -13,21 +13,52 @@ public class Handler implements RequestHandler, Map handleRequest(Map event, Context context) { + long beginMs = System.currentTimeMillis(); long beginNs = System.nanoTime(); Map normalized = normalize(event); Map result = FunctionInvoker.invoke(normalized); long endNs = System.nanoTime(); + long endMs = System.currentTimeMillis(); + + // Format timestamps as "seconds.microseconds" like Python + String beginStr = 
formatTimestamp(beginMs, beginNs); + String endStr = formatTimestamp(endMs, endNs); + + // Get or create container ID + String containerId = ColdStartTracker.getContainerId(); + + // Get cold_start environment variable if present + String coldStartVar = System.getenv("cold_start"); + if (coldStartVar == null) { + coldStartVar = ""; + } Map body = new HashMap<>(); - body.put("begin", beginNs / 1_000_000_000.0); - body.put("end", endNs / 1_000_000_000.0); - body.put("compute_time", (endNs - beginNs) / 1_000.0); + body.put("begin", beginStr); + body.put("end", endStr); body.put("results_time", 0); - body.put("result", result); body.put("is_cold", ColdStartTracker.isCold()); + body.put("result", result); body.put("request_id", context != null ? context.getAwsRequestId() : ""); + body.put("cold_start_var", coldStartVar); + body.put("container_id", containerId); + + Map response = new HashMap<>(); + response.put("statusCode", 200); + try { + response.put("body", MAPPER.writeValueAsString(body)); + } catch (Exception e) { + response.put("body", "{}"); + } + + return response; + } - return body; + private String formatTimestamp(long epochMillis, long nanoTime) { + long seconds = epochMillis / 1000; + // Use nanos for microseconds precision + long microseconds = (nanoTime / 1000) % 1_000_000; + return String.format("%d.%06d", seconds, microseconds); } private Map normalize(Map event) { From 106a016278ad37801d397b91c9a900f8dbe48826 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 12:31:26 +0100 Subject: [PATCH 37/82] [java] Unify microbenchmarks 601.hello-world and 010.sleep --- .../010.sleep}/java/pom.xml | 0 benchmarks/600.java/601.hello-world/config.json | 6 ------ benchmarks/600.java/601.hello-world/input.py | 13 ------------- .../java/src/main/java/function/Function.java | 13 ------------- 4 files changed, 32 deletions(-) rename benchmarks/{600.java/601.hello-world => 000.microbenchmarks/010.sleep}/java/pom.xml (100%) delete mode 100644 
benchmarks/600.java/601.hello-world/config.json delete mode 100644 benchmarks/600.java/601.hello-world/input.py delete mode 100644 benchmarks/600.java/601.hello-world/java/src/main/java/function/Function.java diff --git a/benchmarks/600.java/601.hello-world/java/pom.xml b/benchmarks/000.microbenchmarks/010.sleep/java/pom.xml similarity index 100% rename from benchmarks/600.java/601.hello-world/java/pom.xml rename to benchmarks/000.microbenchmarks/010.sleep/java/pom.xml diff --git a/benchmarks/600.java/601.hello-world/config.json b/benchmarks/600.java/601.hello-world/config.json deleted file mode 100644 index 89e10df5..00000000 --- a/benchmarks/600.java/601.hello-world/config.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "timeout": 60, - "memory": 256, - "languages": ["java"], - "modules": [] -} diff --git a/benchmarks/600.java/601.hello-world/input.py b/benchmarks/600.java/601.hello-world/input.py deleted file mode 100644 index 76e0a845..00000000 --- a/benchmarks/600.java/601.hello-world/input.py +++ /dev/null @@ -1,13 +0,0 @@ -def buckets_count(): - return (0, 0) - -def generate_input( - data_dir, - size, - benchmarks_bucket, - input_paths, - output_paths, - upload_func, - nosql_func=None -): - return { } diff --git a/benchmarks/600.java/601.hello-world/java/src/main/java/function/Function.java b/benchmarks/600.java/601.hello-world/java/src/main/java/function/Function.java deleted file mode 100644 index 3dc09b6a..00000000 --- a/benchmarks/600.java/601.hello-world/java/src/main/java/function/Function.java +++ /dev/null @@ -1,13 +0,0 @@ -package function; -import java.util.HashMap; -import java.util.Map; - -public class Function { - public Map handler(Map input) { - - Map result = new HashMap<>(); - result.put("Hello", "World"); - return result; - } -} - From ca127b1320af43ea8d99ceb972314a594a0e7bc7 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 12:49:25 +0100 Subject: [PATCH 38/82] [java] Downgrade maven plugin and export system-level dependencies to 
system.json --- .../100.webapps/110.dynamic-html/java/pom.xml | 33 ++++++++++--------- config/systems.json | 12 ++++--- 2 files changed, 25 insertions(+), 20 deletions(-) diff --git a/benchmarks/100.webapps/110.dynamic-html/java/pom.xml b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml index fb7e685e..f71142db 100644 --- a/benchmarks/100.webapps/110.dynamic-html/java/pom.xml +++ b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml @@ -4,12 +4,11 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 function - dynamic-html + benchmark 1.0 - 17 - 17 UTF-8 + ${env.JAVA_VERSION} @@ -18,18 +17,7 @@ compiler 0.9.10
- - - com.microsoft.azure.functions - azure-functions-java-library - 3.0.0 - - - - com.fasterxml.jackson.core - jackson-databind - 2.17.1 - + @@ -42,10 +30,22 @@ + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + ${java.version} + ${java.version} + + + + org.apache.maven.plugins maven-shade-plugin - 3.5.1 + 3.2.4 package @@ -58,6 +58,7 @@ *:* + module-info.class META-INF/*.SF META-INF/*.RSA META-INF/*.DSA diff --git a/config/systems.json b/config/systems.json index 9cd6e2ab..385327d9 100644 --- a/config/systems.json +++ b/config/systems.json @@ -138,10 +138,12 @@ ], "deployment": { "files": [ - "pom.xml", "src" ], - "packages": {}, + "packages": { + "com.amazonaws:aws-lambda-java-core": "1.2.3", + "com.fasterxml.jackson.core:jackson-databind": "2.17.1" + }, "module_packages": {} } } @@ -217,10 +219,12 @@ "username": "docker_user", "deployment": { "files": [ - "pom.xml", "src" ], - "packages": {}, + "packages": { + "com.microsoft.azure.functions:azure-functions-java-library": "3.0.0", + "com.fasterxml.jackson.core:jackson-databind": "2.17.1" + }, "module_packages": {} } } From c27c80e2008c1089b52a8b1b5443ec086dbd3390 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 13:18:56 +0100 Subject: [PATCH 39/82] [dev] Linting and docstrings --- sebs/azure/azure.py | 20 ++++++++------------ sebs/benchmark.py | 33 ++++++++++++++++++++------------- sebs/openwhisk/openwhisk.py | 4 +--- 3 files changed, 29 insertions(+), 28 deletions(-) diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index 17f92515..badad216 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -245,7 +245,7 @@ def package_code( EXEC_FILES = { Language.PYTHON: "handler.py", Language.NODEJS: "handler.js", - Language.JAVA: "../lib/function.jar" + Language.JAVA: "../lib/function.jar", } CONFIG_FILES = { Language.PYTHON: ["requirements.txt", ".python_packages"], @@ -263,7 +263,9 @@ def package_code( os.makedirs(lib_dir, exist_ok=True) # Move function.jar to lib directory 
if os.path.exists(os.path.join(directory, "function.jar")): - shutil.move(os.path.join(directory, "function.jar"), os.path.join(lib_dir, "function.jar")) + shutil.move( + os.path.join(directory, "function.jar"), os.path.join(lib_dir, "function.jar") + ) # move all files to 'handler' except package config for f in os.listdir(directory): @@ -296,14 +298,10 @@ def package_code( "direction": "in", "name": "req", "methods": ["get", "post"], - "authLevel": "anonymous" + "authLevel": "anonymous", }, - { - "type": "http", - "direction": "out", - "name": "$return" - } - ] + {"type": "http", "direction": "out", "name": "$return"}, + ], } else: default_function_json = { @@ -667,9 +665,7 @@ def create_function( raise NotImplementedError("Container deployment is not supported in Azure") language = code_package.language_name - language_runtime = self._normalize_runtime_version( - language, code_package.language_version - ) + language_runtime = self._normalize_runtime_version(language, code_package.language_version) # ensure string form is passed to Azure CLI language_runtime = str(language_runtime) if language == "java" and "." not in language_runtime: diff --git a/sebs/benchmark.py b/sebs/benchmark.py index a6a68556..a3e0714d 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -802,9 +802,18 @@ def add_deployment_package_nodejs(self, output_dir: str) -> None: with open(package_config, "w") as package_file: json.dump(package_json, package_file, indent=2) - # Dependencies in system.json are in "group:artifact": version format; - # this function converts them to proper Maven blocks. def format_maven_dependency(self, group_artifact: str, version: str) -> str: + """Helper method to format Java system dependencies. + Dependencies in system.json are in "group:artifact": version format; + this function converts them to proper Maven blocks. 
+ + Args: + group_artifact: name of library to add to benchmark + version: library version + + Returns: + XML-formatted block inserted into pom.xml + """ group_id, artifact_id = group_artifact.split(":") return f""" @@ -813,8 +822,16 @@ def format_maven_dependency(self, group_artifact: str, version: str) -> str: {version} """ - def add_deployment_package_java(self, output_dir): + def add_deployment_package_java(self, output_dir: str): + """Extend benchmark's pom.xml with system-specific packages. + All Java dependencies for each platform are defined in systems.json. + + Args: + output_dir: benchmark directory containing pom.xml to modify + Raises: + ValueError: when benchmark's pom.xml is missing placeholder + """ pom_path = os.path.join(output_dir, "pom.xml") with open(pom_path, "r") as f: pom_content = f.read() @@ -1347,16 +1364,6 @@ def build( self._container_uri, ) - """ - Locates benchmark input generator, inspect how many storage buckets - are needed and launches corresponding storage instance, if necessary. 
- - :param client: Deployment client - :param benchmark: - :param benchmark_path: - :param size: Benchmark workload size - """ - def prepare_input( self, system_resources: SystemResources, diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index f1278f5c..6ebfc454 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -237,7 +237,6 @@ def package_code( else: package_config = [] - benchmark_archive = os.path.join(directory, f"{benchmark}.zip") subprocess.run( ["zip", benchmark_archive] + package_config, @@ -408,8 +407,7 @@ def create_function( ) subprocess.run( - [ - ], + run_arguments, stderr=subprocess.PIPE, stdout=subprocess.PIPE, check=True, From 9f6dbcf461519b8927b568a2f6f292672b6eee04 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 13:19:04 +0100 Subject: [PATCH 40/82] [dev] Extend linting scripts with docstring coverage --- tools/linting.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tools/linting.py b/tools/linting.py index cbdf7832..25a6df96 100755 --- a/tools/linting.py +++ b/tools/linting.py @@ -20,4 +20,8 @@ def call(linter, source, args): print("Check static typing") ret = ret | call("mypy", arg, "--config-file=.mypy.ini") + +print("Check documentation coverage") +ret = ret | call("interrogate", arg, "-v --fail-under 100") + exit(ret) From f5e2ee01369f734acb0d9678b1e40aa0bc315304 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 13:38:39 +0100 Subject: [PATCH 41/82] [system] Bump GCP requirements to work with Python 3.12 --- requirements.gcp.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements.gcp.txt b/requirements.gcp.txt index 60f59150..37d3e37e 100644 --- a/requirements.gcp.txt +++ b/requirements.gcp.txt @@ -1,7 +1,7 @@ grpcio -google-cloud-storage==1.32.0 -google-api-python-client==1.12.5 -google-cloud-monitoring==2.0.0 +google-cloud-storage>=1.32.0 +google-api-python-client>=1.12.5 +google-cloud-monitoring>=2.0.0 
google-api-python-client-stubs -google-cloud-logging==2.0.0 +google-cloud-logging>=2.0.0 google-cloud-datastore From e616406131594555959a30df9956420f1ae48b1c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 13:38:50 +0100 Subject: [PATCH 42/82] [system] Revert incorrect AWS setting --- config/systems.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/systems.json b/config/systems.json index dd5bd5a8..f26d94a0 100644 --- a/config/systems.json +++ b/config/systems.json @@ -169,7 +169,7 @@ } }, "architecture": ["x64", "arm64"], - "deployments": ["package"] + "deployments": ["package", "container"] }, "azure": { "languages": { From 5fc5c86df0b342a5d4ffd8fd42295e5640bd0a3e Mon Sep 17 00:00:00 2001 From: Alexander Schlieper Date: Thu, 8 Jan 2026 14:38:01 +0100 Subject: [PATCH 43/82] Feature: Multi-language Support (Rust, PyPy, Java) & Enhanced Benchmarking Tools - Added Rust runtime support (AWS) - Added PyPy support (AWS, Azure, GCP, Local) - Added Java runtime support (AWS, Azure) - Added cross-platform benchmarking scripts - Enhanced plotting and visualization tools - Updated Dockerfiles and system configurations - Removed sensitive credentials from config files --- .../100.webapps/110.dynamic-html/java/pom.xml | 4 ++++ dockerfiles/aws/java/Dockerfile.build | 7 +++++- dockerfiles/aws/java/Dockerfile.function | 18 +++++++++++--- dockerfiles/java_installer.sh | 24 ++++++++++++++++--- 4 files changed, 46 insertions(+), 7 deletions(-) diff --git a/benchmarks/100.webapps/110.dynamic-html/java/pom.xml b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml index f71142db..5410ade8 100644 --- a/benchmarks/100.webapps/110.dynamic-html/java/pom.xml +++ b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml @@ -20,6 +20,7 @@ + function ${project.basedir}/templates @@ -62,6 +63,9 @@ META-INF/*.SF META-INF/*.RSA META-INF/*.DSA + module-info.class + META-INF/versions/*/module-info.class + META-INF/versions/**/module-info.class diff --git 
a/dockerfiles/aws/java/Dockerfile.build b/dockerfiles/aws/java/Dockerfile.build index 2990a296..bd977d69 100644 --- a/dockerfiles/aws/java/Dockerfile.build +++ b/dockerfiles/aws/java/Dockerfile.build @@ -4,7 +4,12 @@ ARG VERSION ENV JAVA_VERSION=${VERSION} # useradd, groupmod, build tooling -RUN yum install -y shadow-utils unzip tar gzip maven zip +RUN yum install -y shadow-utils unzip tar gzip zip +# Install Maven 3.x (maven package may be old, install from Apache directly) +RUN curl -fsSL https://archive.apache.org/dist/maven/maven-3/3.9.6/binaries/apache-maven-3.9.6-bin.tar.gz | tar -xz -C /opt && \ + ln -s /opt/apache-maven-3.9.6 /opt/maven && \ + ln -s /opt/maven/bin/mvn /usr/local/bin/mvn +ENV PATH=/opt/maven/bin:$PATH ENV GOSU_VERSION 1.14 # https://github.com/tianon/gosu/releases/tag/1.14 # key https://keys.openpgp.org/search?q=tianon%40debian.org diff --git a/dockerfiles/aws/java/Dockerfile.function b/dockerfiles/aws/java/Dockerfile.function index 07ae2f1c..bc20eb68 100644 --- a/dockerfiles/aws/java/Dockerfile.function +++ b/dockerfiles/aws/java/Dockerfile.function @@ -5,11 +5,23 @@ ENV JAVA_VERSION=${VERSION} ARG TARGET_ARCHITECTURE COPY . 
function/ -WORKDIR /function # Ensure packaged jar is present for the Lambda base image -RUN if [ -d "target" ] && ls target/*.jar >/dev/null 2>&1; then \ - cp target/*.jar function.jar; \ +# function.jar should exist (created by java_installer.sh), but if not, copy from target/ +# Prefer the shaded/fat JAR (exclude "original" JARs created by maven-shade-plugin) +RUN if [ -f "function/function.jar" ]; then \ + cp function/function.jar function.jar; \ + elif [ -d "function/target" ] && ls function/target/*.jar >/dev/null 2>&1; then \ + JAR_FILE=$(ls function/target/*.jar 2>/dev/null | grep -v "original-" | head -n1); \ + if [ -z "$JAR_FILE" ]; then \ + JAR_FILE=$(ls function/target/*.jar | head -n1); \ + fi; \ + cp "$JAR_FILE" function.jar; \ + else \ + echo "Error: function.jar not found"; \ + echo "Contents of function/:"; \ + ls -la function/ 2>/dev/null || true; \ + exit 1; \ fi \ && test -f function.jar diff --git a/dockerfiles/java_installer.sh b/dockerfiles/java_installer.sh index 0bc1d953..d2ff97a9 100644 --- a/dockerfiles/java_installer.sh +++ b/dockerfiles/java_installer.sh @@ -4,14 +4,32 @@ set -euo pipefail cd /mnt/function -if [[ -f "pom.xml" ]]; then +# Find pom.xml recursively +POM_PATH=$(find . 
-maxdepth 3 -name "pom.xml" | head -n1) + +if [[ -n "${POM_PATH}" ]]; then + echo "Found pom.xml at ${POM_PATH}" + POM_DIR=$(dirname "${POM_PATH}") + cd "${POM_DIR}" + # Note: -q flag causes issues in Docker, removed for reliable builds mvn -DskipTests clean package if ls target/*.jar >/dev/null 2>&1; then - JAR_PATH=$(ls target/*.jar | head -n1) - cp "${JAR_PATH}" function.jar + # Prefer the shaded/fat JAR (exclude "original" JARs created by maven-shade-plugin) + # The shaded JAR contains all dependencies and is the one we want to use + JAR_PATH=$(ls target/*.jar 2>/dev/null | grep -v "original-" | head -n1) + if [[ -z "${JAR_PATH}" ]]; then + # Fallback to any JAR if no non-original JAR found + JAR_PATH=$(ls target/*.jar | head -n1) + fi + echo "Found built jar at ${JAR_PATH}" + cp "${JAR_PATH}" /mnt/function/function.jar fi + + cd /mnt/function +else + echo "No pom.xml found!" fi if [[ -f "${SCRIPT_FILE:-}" ]]; then From 4d9e184d9277e0cde7ddd3e8fc26bd909b17e5b5 Mon Sep 17 00:00:00 2001 From: Alexander Schlieper Date: Thu, 8 Jan 2026 15:59:29 +0100 Subject: [PATCH 44/82] Update build and deployment configurations - Modified `java_installer.sh` to use `mvn clean package` for more reliable builds. - Updated `Dockerfile.function` to unzip the function JAR and remove the original after extraction. 
--- benchmarks/100.webapps/110.dynamic-html/java/pom.xml | 1 + dockerfiles/aws/java/Dockerfile.function | 4 +++- sebs/experiments/perf_cost.py | 10 ++++++---- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/benchmarks/100.webapps/110.dynamic-html/java/pom.xml b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml index 5410ade8..15724a2f 100644 --- a/benchmarks/100.webapps/110.dynamic-html/java/pom.xml +++ b/benchmarks/100.webapps/110.dynamic-html/java/pom.xml @@ -20,6 +20,7 @@ + function diff --git a/dockerfiles/aws/java/Dockerfile.function b/dockerfiles/aws/java/Dockerfile.function index bc20eb68..aa94525b 100644 --- a/dockerfiles/aws/java/Dockerfile.function +++ b/dockerfiles/aws/java/Dockerfile.function @@ -23,6 +23,8 @@ RUN if [ -f "function/function.jar" ]; then \ ls -la function/ 2>/dev/null || true; \ exit 1; \ fi \ - && test -f function.jar + && test -f function.jar \ + && (unzip function.jar || jar xf function.jar) \ + && rm function.jar CMD ["org.serverlessbench.Handler::handleRequest"] diff --git a/sebs/experiments/perf_cost.py b/sebs/experiments/perf_cost.py index faa669a5..cdb8f330 100644 --- a/sebs/experiments/perf_cost.py +++ b/sebs/experiments/perf_cost.py @@ -499,10 +499,12 @@ def process( for func in experiments.functions(): for id, invoc in experiments.invocations(func).items(): # FIXME: compatibility with old results - if "output" in invoc.output["result"]: - del invoc.output["result"]["output"] - elif "result" in invoc.output["result"]: - del invoc.output["result"]["result"] + # Only process if result is a dict (some languages return primitives directly) + if isinstance(invoc.output["result"], dict): + if "output" in invoc.output["result"]: + del invoc.output["result"]["output"] + elif "result" in invoc.output["result"]: + del invoc.output["result"]["result"] name, extension = os.path.splitext(f) with open( From 6e5ca67052f42da67317f4a72494b050da7f5590 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 15:11:48 
+0100 Subject: [PATCH 45/82] [aws][java] Unify JAR names --- dockerfiles/java_installer.sh | 15 ++++----------- sebs/aws/aws.py | 2 +- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/dockerfiles/java_installer.sh b/dockerfiles/java_installer.sh index d2ff97a9..7fe5b815 100644 --- a/dockerfiles/java_installer.sh +++ b/dockerfiles/java_installer.sh @@ -15,17 +15,10 @@ if [[ -n "${POM_PATH}" ]]; then # Note: -q flag causes issues in Docker, removed for reliable builds mvn -DskipTests clean package - if ls target/*.jar >/dev/null 2>&1; then - # Prefer the shaded/fat JAR (exclude "original" JARs created by maven-shade-plugin) - # The shaded JAR contains all dependencies and is the one we want to use - JAR_PATH=$(ls target/*.jar 2>/dev/null | grep -v "original-" | head -n1) - if [[ -z "${JAR_PATH}" ]]; then - # Fallback to any JAR if no non-original JAR found - JAR_PATH=$(ls target/*.jar | head -n1) - fi - echo "Found built jar at ${JAR_PATH}" - cp "${JAR_PATH}" /mnt/function/function.jar - fi + # Prefer the shaded/fat JAR (exclude "original" JARs created by maven-shade-plugin) + # The shaded JAR contains all dependencies and is the one we want to use + JAR_PATH=target/function.jar + cp "${JAR_PATH}" /mnt/function/function.jar cd /mnt/function else diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index 826bd153..a76547cb 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -219,7 +219,7 @@ def package_code( """ if language == Language.JAVA: - jar_path = os.path.join(directory, "target", "benchmark-1.0.jar") + jar_path = os.path.join(directory, "target", "function.jar") bytes_size = os.path.getsize(jar_path) mbytes = bytes_size / 1024.0 / 1024.0 if not os.path.exists(jar_path): From b10510de3344dd84c67addbbdc410cda0533f72e Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 15:57:57 +0100 Subject: [PATCH 46/82] [aws][java] Update Docker base file --- dockerfiles/aws/java/Dockerfile.function | 37 ++++++++++++++---------- 1 file changed, 21 
insertions(+), 16 deletions(-) diff --git a/dockerfiles/aws/java/Dockerfile.function b/dockerfiles/aws/java/Dockerfile.function index aa94525b..0fc11ab8 100644 --- a/dockerfiles/aws/java/Dockerfile.function +++ b/dockerfiles/aws/java/Dockerfile.function @@ -1,30 +1,35 @@ ARG BASE_IMAGE -FROM $BASE_IMAGE +FROM $BASE_IMAGE as builder ARG VERSION ENV JAVA_VERSION=${VERSION} ARG TARGET_ARCHITECTURE +RUN yum install -y shadow-utils unzip tar gzip zip +# Install Maven 3.x (maven package may be old, install from Apache directly) +RUN curl -fsSL https://archive.apache.org/dist/maven/maven-3/3.9.6/binaries/apache-maven-3.9.6-bin.tar.gz | tar -xz -C /opt && \ + ln -s /opt/apache-maven-3.9.6 /opt/maven && \ + ln -s /opt/maven/bin/mvn /usr/local/bin/mvn +ENV PATH=/opt/maven/bin:$PATH + COPY . function/ # Ensure packaged jar is present for the Lambda base image # function.jar should exist (created by java_installer.sh), but if not, copy from target/ # Prefer the shaded/fat JAR (exclude "original" JARs created by maven-shade-plugin) -RUN if [ -f "function/function.jar" ]; then \ - cp function/function.jar function.jar; \ - elif [ -d "function/target" ] && ls function/target/*.jar >/dev/null 2>&1; then \ - JAR_FILE=$(ls function/target/*.jar 2>/dev/null | grep -v "original-" | head -n1); \ - if [ -z "$JAR_FILE" ]; then \ - JAR_FILE=$(ls function/target/*.jar | head -n1); \ - fi; \ - cp "$JAR_FILE" function.jar; \ +RUN POM_PATH=$(find function/ -maxdepth 3 -name "pom.xml" | head -n1) && \ + if [[ -n "${POM_PATH}" ]]; then \ + mvn -f ${POM_PATH} -DskipTests clean package && \ + POM_DIR=$(dirname "${POM_PATH}") && \ + cp "${POM_DIR}"/target/function.jar function.jar && \ + (unzip function.jar || jar xf function.jar ) && \ + rm function.jar; \ else \ - echo "Error: function.jar not found"; \ - echo "Contents of function/:"; \ - ls -la function/ 2>/dev/null || true; \ + echo "No pom.xml found!" 
&& \ exit 1; \ - fi \ - && test -f function.jar \ - && (unzip function.jar || jar xf function.jar) \ - && rm function.jar + fi + +FROM $BASE_IMAGE + +COPY --from=builder ${LAMBDA_TASK_ROOT}/ ${LAMBDA_TASK_ROOT}/ CMD ["org.serverlessbench.Handler::handleRequest"] From 24c6bf3fc2ac92b19fa403062fcd9a3defbf1a87 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 15:58:14 +0100 Subject: [PATCH 47/82] [system] Remove unnecessary assertion --- sebs/benchmark.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index a3e0714d..4c2f8b1a 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -284,7 +284,6 @@ def code_location(self) -> str | None: return os.path.join(self._cache_client.cache_dir, self.code_package["location"]) return None else: - assert self._code_location is not None return self._code_location @property From 4114db14f8743b55d590e644016f9e125eae9ebf Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 16:12:25 +0100 Subject: [PATCH 48/82] [docs] Update list of Java-supported benchmarks --- docs/benchmarks.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/benchmarks.md b/docs/benchmarks.md index 6af3256e..00439070 100644 --- a/docs/benchmarks.md +++ b/docs/benchmarks.md @@ -2,8 +2,8 @@ | Type | Benchmark | Languages | Architecture | Description | | :--- | :---: | :---: | :---: | :---: | -| Webapps | 010.sleep | Python, Node.js, C++ | x64, arm64 | Customizable sleep microbenchmark. | -| Webapps | 110.dynamic-html | Python, Node.js | x64, arm64 | Generate dynamic HTML from a template. | +| Webapps | 010.sleep | Python, Node.js, C++, Java | x64, arm64 | Customizable sleep microbenchmark. | +| Webapps | 110.dynamic-html | Python, Node.js, Java | x64, arm64 | Generate dynamic HTML from a template. | | Webapps | 120.uploader | Python, Node.js | x64, arm64 | Uploader file from provided URL to cloud storage. 
| | Webapps | 130.crud-api | Python | x64, arm64 | Simple CRUD application using NoSQL to store application data. | | Multimedia | 210.thumbnailer | Python, Node.js, C++ | x64, arm64 | Generate a thumbnail of an image. | From cbd5fecefa4dab1f89d89f707509138e14545b76 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 16:13:05 +0100 Subject: [PATCH 49/82] [system] Fix base images for Java on AWS --- config/systems.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/config/systems.json b/config/systems.json index f26d94a0..aec44361 100644 --- a/config/systems.json +++ b/config/systems.json @@ -125,12 +125,12 @@ "java": { "base_images": { "x64": { - "17": "public.ecr.aws/lambda/java:17", - "11": "amazon/aws-lambda-java:11" + "17": "amazon/aws-lambda-java:17.2026.02.28.00-x86_64", + "11": "amazon/aws-lambda-java:11.2026.02.28.00-x86_64" }, "arm64": { - "17": "public.ecr.aws/lambda/java:17", - "11": "amazon/aws-lambda-java:11" + "17": "amazon/aws-lambda-java:17.2026.02.28.00-arm64", + "11": "amazon/aws-lambda-java:11.2026.02.28.00-arm64" } }, "images": [ From a879d3a14e7a8a546ca0d8305533783fe807c61a Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 16:26:15 +0100 Subject: [PATCH 50/82] [benchmarks] Unify Java names --- benchmarks/000.microbenchmarks/010.sleep/java/pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/benchmarks/000.microbenchmarks/010.sleep/java/pom.xml b/benchmarks/000.microbenchmarks/010.sleep/java/pom.xml index 9947e6bb..5579db12 100644 --- a/benchmarks/000.microbenchmarks/010.sleep/java/pom.xml +++ b/benchmarks/000.microbenchmarks/010.sleep/java/pom.xml @@ -17,6 +17,7 @@ + function From ca27decea5b88f991796385b227a3fea3c8a741c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 16:42:36 +0100 Subject: [PATCH 51/82] [azure] Minor fix to gracefully quit when credentials are expired --- sebs/azure/config.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git 
a/sebs/azure/config.py b/sebs/azure/config.py index 5e20ea75..758e8da0 100644 --- a/sebs/azure/config.py +++ b/sebs/azure/config.py @@ -200,7 +200,10 @@ def serialize(self) -> dict: Returns: Dictionary containing serialized credential data. """ - out = {"subscription_id": self.subscription_id} + if self._subscription_id is not None: + out = {"subscription_id": self.subscription_id} + else: + out = {} return out def update_cache(self, cache_client: Cache) -> None: From b757773c07aa93e369925a556567eb9b3ac4bf11 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 16:54:33 +0100 Subject: [PATCH 52/82] [docs] Additional warning for creating Azure accounts --- docs/platforms.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/platforms.md b/docs/platforms.md index 8f8dfb95..3e32f6c6 100644 --- a/docs/platforms.md +++ b/docs/platforms.md @@ -129,6 +129,9 @@ or in the JSON input configuration: > [!WARNING] > The tool assumes there is only one subscription active on the account. If you want to bind the newly created service principal to a specific subscription, or the created credentials do not work with SeBS and you see errors such as "No subscriptions found for X", then you must specify a subscription when creating the service principal. Check your subscription ID on in the Azure portal, and use the CLI option `tools/create_azure_credentials.py --subscription `. +> [!WARNING] +> Sometimes there's a delay within the Azure platform that affects properties such as subscription assignment. If you keep seeing the error "No subscription found", then wait for a few minutes before trying again. + > [!WARNING] > When you log in for the first time on a device, Microsoft might require authenticating your login with Multi-Factor Authentication (MFA). In this case, we will return an error such as: "The following tenants require Multi-Factor Authentication (MFA). Use 'az login --tenant TENANT_ID' to explicitly login to a tenant.". 
Then, you can pass the tenant ID by using the `--tenant ` flag. From da68363f7fe3662ac83f402c5a2be0913512b6f2 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 17:05:49 +0100 Subject: [PATCH 53/82] [aws] Simplify Java cold start tracker --- .../org/serverlessbench/ColdStartTracker.java | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java index 06a6572f..d611d0b7 100644 --- a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java +++ b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/ColdStartTracker.java @@ -9,7 +9,6 @@ final class ColdStartTracker { - private static final AtomicBoolean COLD = new AtomicBoolean(true); private static final Path MARKER = Path.of("/tmp/cold_run"); private static String containerId = null; @@ -17,19 +16,15 @@ private ColdStartTracker() {} static boolean isCold() { if (Files.exists(MARKER)) { - COLD.set(false); return false; } - boolean first = COLD.getAndSet(false); - if (first) { - try { - containerId = UUID.randomUUID().toString().substring(0, 8); - Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); - } catch (IOException ignored) { - // best-effort marker write - } + try { + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); + } catch (IOException ignored) { + // best-effort marker write } - return first; + return true; } static String getContainerId() { From ca4dc29292b5384698cee7b9334fe888cab4bc38 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 17:06:05 +0100 Subject: [PATCH 54/82] [azure] Simplify and normalize Azure wrappers for Java --- benchmarks/wrappers/azure/java/pom.xml | 71 ------------------- .../org/serverlessbench/ColdStartTracker.java | 22 ++++-- 
.../org/serverlessbench/FunctionInvoker.java | 8 +-- .../java/org/serverlessbench/Handler.java | 49 +++++++++---- 4 files changed, 55 insertions(+), 95 deletions(-) delete mode 100644 benchmarks/wrappers/azure/java/pom.xml diff --git a/benchmarks/wrappers/azure/java/pom.xml b/benchmarks/wrappers/azure/java/pom.xml deleted file mode 100644 index 195df616..00000000 --- a/benchmarks/wrappers/azure/java/pom.xml +++ /dev/null @@ -1,71 +0,0 @@ - - 4.0.0 - org.serverlessbench - function - 1.0.0 - - 17 - 17 - - - - com.microsoft.azure.functions - azure-functions-java-library - 3.0.0 - - - com.fasterxml.jackson.core - jackson-databind - 2.17.1 - - - - function - - - com.microsoft.azure - azure-functions-maven-plugin - 1.31.0 - - unused - unused - westeurope - - linux - 17 - - - - - - - org.apache.maven.plugins - maven-shade-plugin - 3.5.1 - - - package - - shade - - - false - - - *:* - - META-INF/*.SF - META-INF/*.RSA - META-INF/*.DSA - - - - - - - - - - diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java index fbedaa20..d611d0b7 100644 --- a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/ColdStartTracker.java @@ -9,8 +9,8 @@ final class ColdStartTracker { - private static final AtomicBoolean WORKER_COLD = new AtomicBoolean(true); private static final Path MARKER = Path.of("/tmp/cold_run"); + private static String containerId = null; private ColdStartTracker() {} @@ -19,15 +19,27 @@ static boolean isCold() { return false; } try { - Files.writeString( - MARKER, UUID.randomUUID().toString().substring(0, 8), StandardCharsets.UTF_8); + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); } catch (IOException ignored) { // best-effort marker write } return true; } 
- static boolean isWorkerCold() { - return WORKER_COLD.getAndSet(false); + static String getContainerId() { + if (containerId == null) { + try { + if (Files.exists(MARKER)) { + containerId = Files.readString(MARKER, StandardCharsets.UTF_8); + } else { + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); + } + } catch (IOException e) { + containerId = UUID.randomUUID().toString().substring(0, 8); + } + } + return containerId; } } diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java index 7d9c8357..9c502f86 100644 --- a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java @@ -24,13 +24,13 @@ static Map invoke(Map input) { return casted; } } catch (ClassNotFoundException e) { - return defaultResponse("Function implementation not found"); + throw new RuntimeException("Function implementation not found"); } catch (NoSuchMethodException e) { - return defaultResponse("Function.handler(Map) missing"); + throw new RuntimeException("Function.handler(Map) missing"); } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { - return defaultResponse("Failed to invoke function: " + e.getMessage()); + throw new RuntimeException("Failed to invoke function: " + e.getMessage()); } - return defaultResponse("Function returned unsupported type"); + throw new RuntimeException("Function returned unsupported type"); } private static Map defaultResponse(String message) { diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java index 8f9b3e1c..5484b6ff 100644 --- 
a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java @@ -17,33 +17,45 @@ public class Handler { @FunctionName("handler") public HttpResponseMessage handleRequest( - @HttpTrigger( - name = "req", - methods = {HttpMethod.GET, HttpMethod.POST}, - authLevel = AuthorizationLevel.ANONYMOUS) - final HttpRequestMessage> request, - final ExecutionContext context) { + @HttpTrigger( + name = "req", + methods = {HttpMethod.GET, HttpMethod.POST}, + authLevel = AuthorizationLevel.ANONYMOUS + ) + final HttpRequestMessage> request, + final ExecutionContext context + ) { + long beginMs = System.currentTimeMillis(); long beginNs = System.nanoTime(); Map normalized = normalizeRequest(request); Map result = FunctionInvoker.invoke(normalized); long endNs = System.nanoTime(); + long endMs = System.currentTimeMillis(); + + // Format timestamps as "seconds.microseconds" like Python + String beginStr = formatTimestamp(beginMs, beginNs); + String endStr = formatTimestamp(endMs, endNs); + + // Get or create container ID + String containerId = ColdStartTracker.getContainerId(); + + // Get cold_start environment variable if present + String coldStartVar = System.getenv("cold_start"); + if (coldStartVar == null) { + coldStartVar = ""; + } Map body = new HashMap<>(); - body.put("begin", beginNs / 1_000_000_000.0); - body.put("end", endNs / 1_000_000_000.0); - body.put("compute_time", (endNs - beginNs) / 1_000.0); + body.put("begin", beginStr); + body.put("end", endStr); body.put("results_time", 0); body.put("result", result); body.put("is_cold", ColdStartTracker.isCold()); - body.put("is_cold_worker", ColdStartTracker.isWorkerCold()); + body.put("container_id", containerId); + body.put("cold_start_var", coldStartVar); body.put("request_id", context != null ? 
context.getInvocationId() : ""); - String coldStartVar = System.getenv("cold_start"); - if (coldStartVar != null) { - body.put("cold_start_var", coldStartVar); - } - String json = toJson(body); return request .createResponseBuilder(HttpStatus.OK) @@ -52,6 +64,13 @@ public HttpResponseMessage handleRequest( .build(); } + private String formatTimestamp(long epochMillis, long nanoTime) { + long seconds = epochMillis / 1000; + // Use nanos for microseconds precision + long microseconds = (nanoTime / 1000) % 1_000_000; + return String.format("%d.%06d", seconds, microseconds); + } + private Map normalizeRequest(HttpRequestMessage> request) { if (request == null) { return new HashMap<>(); From 1f6462d94d9920062e132c1ae11546b9313be3a2 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 17:16:36 +0100 Subject: [PATCH 55/82] [azure] Additional check for failed logins --- sebs/azure/config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sebs/azure/config.py b/sebs/azure/config.py index 758e8da0..e3f3dbcd 100644 --- a/sebs/azure/config.py +++ b/sebs/azure/config.py @@ -170,7 +170,8 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Creden old_subscription_id: Optional[str] = None # Load cached values if cached_config and "credentials" in cached_config: - old_subscription_id = cached_config["credentials"]["subscription_id"] + if "subscription_id" in cached_config["credentials"]: + old_subscription_id = cached_config["credentials"]["subscription_id"] # Check for new config if "credentials" in config and "appId" in config["credentials"]: From d6969342ee5c3ce1359f2b7a61cab80d5700f739 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 17:19:46 +0100 Subject: [PATCH 56/82] [system] Update regression to correctly recognize platform capabilities We always correctly set deployment method and CPU architecture. 
However, we didn't do it for initial platform init, we could cause errors - if user-provided configuration set "arm64", we would fail in creating Azure instance. --- sebs/regression.py | 238 +++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 229 insertions(+), 9 deletions(-) diff --git a/sebs/regression.py b/sebs/regression.py index 75956095..46b25ba5 100644 --- a/sebs/regression.py +++ b/sebs/regression.py @@ -50,6 +50,8 @@ benchmarks_nodejs = ["010.sleep", "110.dynamic-html", "120.uploader", "210.thumbnailer"] +benchmarks_java = ["010.sleep", "110.dynamic-html"] + benchmarks_cpp = [ "010.sleep", "210.thumbnailer", @@ -310,10 +312,15 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): deployment_name = "aws" assert cloud_config, "Cloud configuration is required" + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + # Create a log file name based on test parameters f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( - cloud_config, + config_copy, logging_filename=os.path.join(self.client.output_dir, f), ) @@ -359,10 +366,15 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): deployment_name = "aws" assert cloud_config, "Cloud configuration is required" + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + # Create a log file name based on test parameters f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( - cloud_config, + 
config_copy, logging_filename=os.path.join(self.client.output_dir, f), ) @@ -400,9 +412,14 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): deployment_name = "aws" assert cloud_config, "Cloud configuration is required" + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( - cloud_config, + config_copy, logging_filename=os.path.join(self.client.output_dir, f), ) with AWSTestSequenceCpp.lock: @@ -410,6 +427,57 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): return deployment_client +class AWSTestSequenceJava( + unittest.TestCase, + metaclass=TestSequenceMeta, + benchmarks=benchmarks_java, + architectures=architectures_aws, + deployments=deployments_aws, + deployment_name="aws", + triggers=[Trigger.TriggerType.LIBRARY, Trigger.TriggerType.HTTP], +): + """Test suite for Java benchmarks on AWS Lambda. + + Attributes: + benchmarks: List of Java benchmarks to test + architectures: List of AWS architectures to test (x64, arm64) + deployments: List of deployment types to test (package, container) + deployment_name: Cloud provider name ("aws") + triggers: List of trigger types to test (LIBRARY, HTTP) + """ + + def get_deployment(self, benchmark_name, architecture, deployment_type): + """Get an AWS deployment client for the specified configuration. 
+ + Args: + benchmark_name: Name of the benchmark to deploy + architecture: Architecture to deploy on (x64, arm64) + deployment_type: Deployment type (package, container) + + Returns: + An initialized AWS deployment client + + Raises: + AssertionError: If cloud_config is not set + """ + deployment_name = "aws" + assert cloud_config, "Cloud configuration is required" + + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + + f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" + deployment_client = self.client.get_deployment( + config_copy, + logging_filename=os.path.join(self.client.output_dir, f), + ) + with AWSTestSequenceJava.lock: + deployment_client.initialize(resource_prefix="regr") + return deployment_client + + class AzureTestSequencePython( unittest.TestCase, metaclass=TestSequenceMeta, @@ -468,11 +536,16 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): self.client.config, self.client.docker_client ) + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + # Create log file name and get deployment client f = f"regression_{deployment_name}_{benchmark_name}_" f += f"{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( - cloud_config, + config_copy, logging_filename=os.path.join(self.client.output_dir, f), deployment_config=AzureTestSequencePython.cfg, ) @@ -540,11 +613,16 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): self.client.config, self.client.docker_client ) + # Create a copy of the config and set architecture and 
deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + # Create log file name and get deployment client f = f"regression_{deployment_name}_{benchmark_name}_" f += f"{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( - cloud_config, + config_copy, logging_filename=os.path.join(self.client.output_dir, f), deployment_config=AzureTestSequenceNodejs.cfg, ) @@ -555,6 +633,74 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): return deployment_client +class AzureTestSequenceJava( + unittest.TestCase, + metaclass=TestSequenceMeta, + benchmarks=benchmarks_java, + architectures=architectures_azure, + deployments=deployments_azure, + deployment_name="azure", + triggers=[Trigger.TriggerType.HTTP], +): + """Test suite for Java benchmarks on Azure Functions. + + Attributes: + benchmarks: List of Java benchmarks to test + architectures: List of Azure architectures to test (x64) + deployments: List of deployment types to test (package) + deployment_name: Cloud provider name ("azure") + triggers: List of trigger types to test (HTTP) + """ + + def get_deployment(self, benchmark_name, architecture, deployment_type): + """Get an Azure deployment client for the specified configuration. 
+ + Args: + benchmark_name: Name of the benchmark to deploy + architecture: Architecture to deploy on (x64) + deployment_type: Deployment type (package) + + Returns: + An initialized Azure deployment client + + Raises: + AssertionError: If cloud_config is not set + """ + deployment_name = "azure" + assert cloud_config, "Cloud configuration is required" + + with AzureTestSequenceJava.lock: + # Cache the deployment configuration for reuse across tests + if not AzureTestSequenceJava.cfg: + AzureTestSequenceJava.cfg = self.client.get_deployment_config( + cloud_config["deployment"], + logging_filename=f"regression_{deployment_name}_{benchmark_name}.log", + ) + + # Initialize Azure CLI if not already done + if not hasattr(AzureTestSequenceJava, "cli"): + AzureTestSequenceJava.cli = AzureCLI(self.client.config, self.client.docker_client) + + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + + # Create log file name and get deployment client + f = f"regression_{deployment_name}_{benchmark_name}_" + f += f"{architecture}_{deployment_type}.log" + deployment_client = self.client.get_deployment( + config_copy, + logging_filename=os.path.join(self.client.output_dir, f), + deployment_config=AzureTestSequenceJava.cfg, + ) + + # Initialize CLI and setup resources (no login needed - reuses previous session) + deployment_client.system_resources.initialize_cli(cli=AzureTestSequenceJava.cli) + deployment_client.initialize(resource_prefix="regr") + return deployment_client + + class GCPTestSequencePython( unittest.TestCase, metaclass=TestSequenceMeta, @@ -591,10 +737,15 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): deployment_name = "gcp" assert cloud_config, "Cloud configuration is required" + # Create a copy of the config and set architecture 
and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + # Create log file name based on test parameters f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( - cloud_config, + config_copy, logging_filename=os.path.join(self.client.output_dir, f), ) @@ -640,10 +791,15 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): deployment_name = "gcp" assert cloud_config, "Cloud configuration is required" + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + # Create log file name based on test parameters f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( - cloud_config, + config_copy, logging_filename=os.path.join(self.client.output_dir, f), ) @@ -696,8 +852,8 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): # Create a copy of the config and set architecture and deployment type config_copy = copy.deepcopy(cloud_config) config_copy["experiments"]["architecture"] = architecture - config_copy["experiments"]["container_deployment"] = deployment_type == "container" + # Create log file name based on test parameters f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" deployment_client = self.client.get_deployment( @@ -752,7 +908,7 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): assert cloud_config, "Cloud configuration is required" # Create a copy of the config and set architecture and deployment type - config_copy = cloud_config.copy() + 
config_copy = copy.deepcopy(cloud_config) config_copy["experiments"]["architecture"] = architecture config_copy["experiments"]["container_deployment"] = deployment_type == "container" @@ -769,6 +925,60 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): return deployment_client +class OpenWhiskTestSequenceJava( + unittest.TestCase, + metaclass=TestSequenceMeta, + benchmarks=benchmarks_java, + architectures=architectures_openwhisk, + deployments=deployments_openwhisk, + deployment_name="openwhisk", + triggers=[Trigger.TriggerType.HTTP], +): + """Test suite for Java benchmarks on OpenWhisk. + + Attributes: + benchmarks: List of Java benchmarks to test + architectures: List of OpenWhisk architectures to test (x64) + deployments: List of deployment types to test (container) + deployment_name: Cloud provider name ("openwhisk") + triggers: List of trigger types to test (HTTP) + """ + + def get_deployment(self, benchmark_name, architecture, deployment_type): + """Get an OpenWhisk deployment client for the specified configuration. 
+ + Args: + benchmark_name: Name of the benchmark to deploy + architecture: Architecture to deploy on (x64) + deployment_type: Deployment type (container) + + Returns: + An initialized OpenWhisk deployment client + + Raises: + AssertionError: If cloud_config is not set + """ + deployment_name = "openwhisk" + assert cloud_config, "Cloud configuration is required" + + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + + # Create log file name based on test parameters + f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" + deployment_client = self.client.get_deployment( + config_copy, + logging_filename=os.path.join(self.client.output_dir, f), + ) + + # Synchronize resource initialization with a lock + with OpenWhiskTestSequenceJava.lock: + deployment_client.initialize(resource_prefix="regr") + return deployment_client + + # Stream result handler for concurrent test execution # Based on https://stackoverflow.com/questions/22484805/ # a-simple-working-example-for-testtools-concurrentstreamtestsuite @@ -932,6 +1142,8 @@ def regression_suite( suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AWSTestSequencePython)) elif language == "nodejs": suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AWSTestSequenceNodejs)) + elif language == "java": + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AWSTestSequenceJava)) elif language == "cpp": suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AWSTestSequenceCpp)) # Add GCP tests if requested @@ -953,6 +1165,8 @@ def regression_suite( suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AzureTestSequencePython)) elif language == "nodejs": 
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AzureTestSequenceNodejs)) + elif language == "java": + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AzureTestSequenceJava)) # Add OpenWhisk tests if requested if "openwhisk" in providers: @@ -967,6 +1181,10 @@ def regression_suite( suite.addTest( unittest.defaultTestLoader.loadTestsFromTestCase(OpenWhiskTestSequenceNodejs) ) + elif language == "java": + suite.addTest( + unittest.defaultTestLoader.loadTestsFromTestCase(OpenWhiskTestSequenceJava) + ) # Prepare the list of tests to run tests = [] @@ -1022,6 +1240,8 @@ def regression_suite( AzureTestSequenceNodejs.cli.shutdown() if hasattr(AzureTestSequencePython, "cli"): AzureTestSequencePython.cli.shutdown() + if hasattr(AzureTestSequenceJava, "cli"): + AzureTestSequenceJava.cli.shutdown() # Return True if any test failed return not result.all_correct From 0ddb6c29162a145d6cbe6c3d5f1fa1e9a31df1c1 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 17:20:10 +0100 Subject: [PATCH 57/82] [dev] Linting --- sebs/experiments/perf_cost.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sebs/experiments/perf_cost.py b/sebs/experiments/perf_cost.py index cdb8f330..fddcef41 100644 --- a/sebs/experiments/perf_cost.py +++ b/sebs/experiments/perf_cost.py @@ -499,7 +499,8 @@ def process( for func in experiments.functions(): for id, invoc in experiments.invocations(func).items(): # FIXME: compatibility with old results - # Only process if result is a dict (some languages return primitives directly) + # Only process if result is a dict + # (some languages return primitives directly) if isinstance(invoc.output["result"], dict): if "output" in invoc.output["result"]: del invoc.output["result"]["output"] From 255f4523af2901dd50a66cb85a1a1012c803a245 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 20:15:15 +0100 Subject: [PATCH 58/82] [gcp] Correct GCP build behavior - observe actual build, not just 
function deploymen --- sebs/gcp/gcp.py | 312 ++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 260 insertions(+), 52 deletions(-) diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py index 15d95c7b..85099fe6 100644 --- a/sebs/gcp/gcp.py +++ b/sebs/gcp/gcp.py @@ -38,6 +38,7 @@ from googleapiclient.discovery import build from googleapiclient.errors import HttpError import google.cloud.monitoring_v3 as monitoring_v3 +from google.cloud.devtools import cloudbuild_v1 from sebs.cache import Cache from sebs.config import SeBSConfig @@ -204,6 +205,168 @@ def format_function_name(func_name: str) -> str: func_name = func_name.replace(".", "_") return func_name + def _poll_build_status(self, build_name: str, func_name: str, timeout: int = 300) -> None: + """Poll build operation until completion or failure. + + Monitors a Cloud Build operation, waiting for it to complete successfully + or fail. Provides detailed error information if the build fails. + + Args: + build_name: Fully qualified build name from GCP API + func_name: Function name for logging purposes + timeout: Maximum time to wait in seconds (default: 300) + + Raises: + RuntimeError: If build fails or timeout is reached + """ + build_client = cloudbuild_v1.CloudBuildClient() + begin = time.time() + + while True: + build_status = build_client.get_build(name=build_name) + + if build_status.status == cloudbuild_v1.Build.Status.SUCCESS: + self.logging.info(f"Function {func_name} - build completed successfully!") + break + elif build_status.status == cloudbuild_v1.Build.Status.FAILURE: + self.logging.error(f"Failed to build function: {func_name}") + self.logging.error(f"Reasons: {build_status.failure_info.detail}") + self.logging.error(f"URL for detailed error: {build_status.log_url}") + raise RuntimeError(f"Build failed for function {func_name}!") from None + elif build_status.status in ( + cloudbuild_v1.Build.Status.CANCELLED, + cloudbuild_v1.Build.Status.TIMEOUT, + ): + self.logging.error(f"Build was cancelled 
or timed out for function: {func_name}") + self.logging.error(f"URL for detailed error: {build_status.log_url}") + raise RuntimeError(f"Build failed for function {func_name}!") from None + + if time.time() - begin > timeout: + self.logging.error( + f"Failed to build function: {func_name} after {timeout} seconds!" + ) + raise RuntimeError(f"Build timeout for function {func_name}!") from None + + time.sleep(3) + + def _wait_for_build_and_poll( + self, func_name: str, timeout: int = 300, poll_interval: int = 2 + ) -> bool: + """Wait for build to start, get build name, and poll until completion. + + For patch operations that don't immediately return a build name, this function + waits for the build to start, retrieves the build name from the function's + metadata, and then polls the build status. + + Args: + func_name: Name of the function being built + timeout: Maximum time to wait in seconds (default: 300) + poll_interval: Seconds between polling attempts (default: 2) + + Returns: + True if a build was found and completed successfully, False if no build was found + + Raises: + RuntimeError: If build fails + """ + full_func_name = GCP.get_full_function_name( + self.config.project_name, self.config.region, func_name + ) + begin = time.time() + build_name = None + previous_build_id = None + + # First, try to get the current build ID to compare against + try: + get_req = ( + self.function_client.projects().locations().functions().get(name=full_func_name) + ) + func_details = get_req.execute() + if "buildId" in func_details: + previous_build_id = func_details["buildId"] + except HttpError: + pass + + # Wait for build to start and get build name + self.logging.info(f"Waiting for build to start for function {func_name}...") + while build_name is None: + if time.time() - begin > timeout: + self.logging.warning( + f"No build found for {func_name} after {timeout}s - " + "might be a configuration-only update" + ) + return False + + try: + # Get function details to find the 
build + get_req = ( + self.function_client.projects().locations().functions().get(name=full_func_name) + ) + func_details = get_req.execute() + + # Check if there's a new build in progress + if "buildId" in func_details: + build_id = func_details["buildId"] + # Only consider it a new build if it's different from the previous one + if previous_build_id is None or build_id != previous_build_id: + # Construct build name from build ID + build_name = ( + f"projects/{self.config.project_name}/locations/" + f"{self.config.region}/builds/{build_id}" + ) + self.logging.info(f"Found build {build_id} for function {func_name}") + break + except HttpError as e: + self.logging.debug(f"Error getting function details: {e}") + + time.sleep(poll_interval) + + # Now poll the build status + if build_name: + self._poll_build_status(build_name, func_name, timeout) + return True + + return False + + def verify_deployment( + self, func_name: str, expected_version: Optional[int] = None + ) -> Tuple[bool, int]: + """Verify that function deployment is complete. + + Performs a single check to verify the function is in ACTIVE state and + optionally matches the expected version. This should be called after + build polling is complete. 
+ + Args: + func_name: Name of the function to verify + expected_version: Optional version ID to verify (None to skip version check) + + Returns: + Tuple of (is_deployed, current_version_id) + """ + full_func_name = GCP.get_full_function_name( + self.config.project_name, self.config.region, func_name + ) + get_req = self.function_client.projects().locations().functions().get(name=full_func_name) + func_details = get_req.execute() + + is_active = func_details["status"] == "ACTIVE" + current_version = int(func_details["versionId"]) + + if expected_version is not None: + is_deployed = is_active and current_version == expected_version + else: + is_deployed = is_active + + if not is_deployed: + self.logging.warning( + f"Function {func_name} deployment verification failed: " + f"status={func_details['status']}, version={current_version}, " + f"expected_version={expected_version}" + ) + + return (is_deployed, current_version) + def package_code( self, directory: str, @@ -237,6 +400,23 @@ def package_code( Tuple of (archive_path, archive_size_bytes) """ + # Handle Java packaging - GCP accepts JAR files directly + if language == Language.JAVA: + jar_path = os.path.join(directory, "target", "function.jar") + + if not os.path.exists(jar_path): + raise RuntimeError( + f"Java artifact {jar_path} missing. " f"Ensure Java build produced the jar." 
+ ) + + bytes_size = os.path.getsize(jar_path) + mbytes = bytes_size / 1024.0 / 1024.0 + + self.logging.info(f"Created {jar_path} archive") + self.logging.info(f"Jar archive size {mbytes:.2f} MB") + + return (jar_path, bytes_size) + CONFIG_FILES = { Language.PYTHON: ["handler.py", ".python_packages"], Language.NODEJS: ["handler.js", "node_modules"], @@ -359,7 +539,11 @@ def create_function( ), body={ "name": full_func_name, - "entryPoint": "handler", + "entryPoint": ( + "org.serverlessbench.Handler" + if code_package.language == Language.JAVA + else "handler" + ), "runtime": code_package.language_name + language_runtime.replace(".", ""), "availableMemoryMb": memory, "timeout": str(timeout) + "s", @@ -370,8 +554,20 @@ def create_function( }, ) ) - create_req.execute() - self.logging.info(f"Function {func_name} has been created!") + ret = create_req.execute() + self.logging.info(f"Function {func_name} is creating - GCP deployment is started!") + + # Poll build status until completion or failure + build_found = self._wait_for_build_and_poll(func_name) + if not build_found: + raise RuntimeError(f"No build operation found for {func_name}!") + + # Verify deployment is complete + is_deployed, _ = self.verify_deployment(func_name) + if not is_deployed: + raise RuntimeError( + f"Function {func_name} build succeeded but deployment verification failed!" 
+ ) allow_unauthenticated_req = ( self.function_client.projects() @@ -382,7 +578,10 @@ def create_function( body={ "policy": { "bindings": [ - {"role": "roles/cloudfunctions.invoker", "members": ["allUsers"]} + { + "role": "roles/cloudfunctions.invoker", + "members": ["allUsers"], + } ] } }, @@ -458,26 +657,23 @@ def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) if trigger_type == Trigger.TriggerType.HTTP: + # Verify function is deployed (should already be done by create/update) + is_deployed, _ = self.verify_deployment(function.name) + if not is_deployed: + raise RuntimeError( + f"Function {function.name} must be deployed before creating HTTP trigger!" + ) + + # Get the HTTPS trigger URL location = self.config.region project_name = self.config.project_name full_func_name = GCP.get_full_function_name(project_name, location, function.name) - self.logging.info(f"Function {function.name} - waiting for deployment...") - our_function_req = ( + get_req = ( self.function_client.projects().locations().functions().get(name=full_func_name) ) - deployed = False - begin = time.time() - while not deployed: - status_res = our_function_req.execute() - if status_res["status"] == "ACTIVE": - deployed = True - else: - time.sleep(3) - if time.time() - begin > 300: # wait 5 minutes; TODO: make it configurable - self.logging.error(f"Failed to deploy function: {function.name}") - raise RuntimeError("Deployment timeout!") - self.logging.info(f"Function {function.name} - deployed!") - invoke_url = status_res["httpsTrigger"]["url"] + func_details = get_req.execute() + invoke_url = func_details["httpsTrigger"]["url"] + self.logging.info(f"Function {function.name} - HTTP trigger ready at {invoke_url}") trigger = HTTPTrigger(invoke_url) else: @@ -562,7 +758,11 @@ def update_function( name=full_func_name, body={ "name": full_func_name, - "entryPoint": "handler", + "entryPoint": ( + "org.serverlessbench.Handler" + if code_package.language == Language.JAVA + else 
"handler" + ), "runtime": code_package.language_name + language_runtime.replace(".", ""), "availableMemoryMb": function.config.memory, "timeout": str(function.config.timeout) + "s", @@ -573,22 +773,24 @@ def update_function( ) ) res = req.execute() - versionId = res["metadata"]["versionId"] - retries = 0 - last_version = -1 - while retries < 100: - is_deployed, last_version = self.is_deployed(function.name, versionId) - if not is_deployed: - time.sleep(5) - retries += 1 - else: - break - if retries > 0 and retries % 10 == 0: - self.logging.info(f"Waiting for function deployment, {retries} retries.") - if retries == 100: + + self.logging.info(f"Function {function.name} code update initiated") + + # Patch does not return buildName, need to wait for build to start + expected_version = int(res["metadata"]["versionId"]) + build_found = self._wait_for_build_and_poll(function.name) + if not build_found: + self.logging.warning( + f"No build operation found for {function.name} - " + "this is unexpected for code updates" + ) + + # Verify deployment with expected version + is_deployed, current_version = self.verify_deployment(function.name, expected_version) + if not is_deployed: raise RuntimeError( - "Failed to publish new function code after 10 attempts. " - f"Version {versionId} has not been published, last version {last_version}." + f"Failed to publish new function code. " + f"Expected version {expected_version}, current version {current_version}." 
) self.logging.info("Published new function code and configuration.") @@ -711,26 +913,28 @@ def update_function_configuration( ) res = req.execute() - versionId = res["metadata"]["versionId"] - retries = 0 - last_version = -1 - while retries < 100: - is_deployed, last_version = self.is_deployed(function.name, versionId) - if not is_deployed: - time.sleep(5) - retries += 1 - else: + expected_version = int(res["metadata"]["versionId"]) + + self.logging.info(f"Function {function.name} configuration update initiated") + + # Verify deployment with expected version + # Retry a few times as version might take a moment to propagate + max_retries = 10 + for retry in range(max_retries): + is_deployed, current_version = self.verify_deployment(function.name, expected_version) + if is_deployed: break - if retries > 0 and retries % 10 == 0: - self.logging.info(f"Waiting for function deployment, {retries} retries.") - if retries == 100: + if retry < max_retries - 1: + time.sleep(2) + + if not is_deployed: raise RuntimeError( - "Failed to publish new function code after 10 attempts. " - f"Version {versionId} has not been published, last version {last_version}." + f"Failed to publish new function configuration. " + f"Expected version {expected_version}, current version {current_version}." ) self.logging.info("Published new function configuration.") - return versionId + return expected_version @staticmethod def get_full_function_name(project_name: str, location: str, func_name: str) -> str: @@ -755,7 +959,12 @@ def shutdown(self) -> None: super().shutdown() def download_metrics( - self, function_name: str, start_time: int, end_time: int, requests: Dict, metrics: Dict + self, + function_name: str, + start_time: int, + end_time: int, + requests: Dict, + metrics: Dict, ) -> None: """Download execution metrics and logs from GCP monitoring services. 
@@ -992,7 +1201,6 @@ def get_functions(self, code_package: Benchmark, function_names: List[str]) -> L
         return functions
 
     def is_deployed(self, func_name: str, versionId: int = -1) -> Tuple[bool, int]:
-        """Check if a function is deployed and optionally verify its version.
 
         Args:
             func_name: Name of the function to check

From 17ba56330820cd5bff723e3b6beb084c17dedd29 Mon Sep 17 00:00:00 2001
From: Marcin Copik
Date: Thu, 5 Mar 2026 20:37:15 +0100
Subject: [PATCH 59/82] [gcp] Corrected JAR upload for Java functions

---
 sebs/gcp/gcp.py | 37 ++++++++++++++++---------------------
 1 file changed, 16 insertions(+), 21 deletions(-)

diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py
index 85099fe6..05827029 100644
--- a/sebs/gcp/gcp.py
+++ b/sebs/gcp/gcp.py
@@ -400,26 +400,18 @@ def package_code(
             Tuple of (archive_path, archive_size_bytes)
         """
 
-        # Handle Java packaging - GCP accepts JAR files directly
-        if language == Language.JAVA:
-            jar_path = os.path.join(directory, "target", "function.jar")
-
-            if not os.path.exists(jar_path):
-                raise RuntimeError(
-                    f"Java artifact {jar_path} missing. " f"Ensure Java build produced the jar."
-                )
-
-            bytes_size = os.path.getsize(jar_path)
-            mbytes = bytes_size / 1024.0 / 1024.0
-
-            self.logging.info(f"Created {jar_path} archive")
-            self.logging.info(f"Jar archive size {mbytes:.2f} MB")
-
-            return (jar_path, bytes_size)
+        if language == Language.CPP:
+            raise NotImplementedError("C++ packaging is not supported on GCP!")
 
+        """
+        While for Java we already produce an archive (JAR),
+        we need to pack it in a zip file as their build system will unzip it
+        and complain that it finds classes, and not a JAR. 
+ """ CONFIG_FILES = { Language.PYTHON: ["handler.py", ".python_packages"], Language.NODEJS: ["handler.js", "node_modules"], + Language.JAVA: ["function.jar"], } HANDLER = { Language.PYTHON: ("handler.py", "main.py"), @@ -434,10 +426,12 @@ def package_code( shutil.move(file, function_dir) # rename handler function.py since in gcp it has to be caled main.py - old_name, new_name = HANDLER[language] - old_path = os.path.join(directory, old_name) - new_path = os.path.join(directory, new_name) - shutil.move(old_path, new_path) + old_path, new_path = None, None + if language in HANDLER: + old_name, new_name = HANDLER[language] + old_path = os.path.join(directory, old_name) + new_path = os.path.join(directory, new_name) + shutil.move(old_path, new_path) """ zip the whole directory (the zip-file gets uploaded to gcp later) @@ -459,7 +453,8 @@ def package_code( logging.info("Zip archive size {:2f} MB".format(mbytes)) # rename the main.py back to handler.py - shutil.move(new_path, old_path) + if new_path is not None and old_path is not None: + shutil.move(new_path, old_path) return ( os.path.join(directory, "{}.zip".format(benchmark)), From 40962a21849f81745456d646c60b68a3b57ee5b7 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 21:00:46 +0100 Subject: [PATCH 60/82] [gcp] Make sure we wait for deployment to finish --- sebs/gcp/gcp.py | 120 +++++++++++++++++++++++------------------------- 1 file changed, 57 insertions(+), 63 deletions(-) diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py index 05827029..d43cdd46 100644 --- a/sebs/gcp/gcp.py +++ b/sebs/gcp/gcp.py @@ -254,7 +254,7 @@ def _wait_for_build_and_poll( ) -> bool: """Wait for build to start, get build name, and poll until completion. 
- For patch operations that don't immediately return a build name, this function + Since GCP operations typically don't immediately return a build name, this function waits for the build to start, retrieves the build name from the function's metadata, and then polls the build status. @@ -328,44 +328,66 @@ def _wait_for_build_and_poll( return False - def verify_deployment( - self, func_name: str, expected_version: Optional[int] = None - ) -> Tuple[bool, int]: - """Verify that function deployment is complete. + def _wait_for_active_status( + self, func_name: str, expected_version: Optional[int] = None, timeout: int = 60 + ) -> int: + """Wait for function to reach ACTIVE status after build completes. - Performs a single check to verify the function is in ACTIVE state and - optionally matches the expected version. This should be called after - build polling is complete. + After a build completes, the function may be in DEPLOY_IN_PROGRESS state + for a short time. This function polls until the status becomes ACTIVE. 
Args: - func_name: Name of the function to verify + func_name: Name of the function to check expected_version: Optional version ID to verify (None to skip version check) + timeout: Maximum time to wait in seconds (default: 60) Returns: - Tuple of (is_deployed, current_version_id) + Current version ID of the function + + Raises: + RuntimeError: If deployment fails or timeout is reached """ full_func_name = GCP.get_full_function_name( self.config.project_name, self.config.region, func_name ) - get_req = self.function_client.projects().locations().functions().get(name=full_func_name) - func_details = get_req.execute() - - is_active = func_details["status"] == "ACTIVE" - current_version = int(func_details["versionId"]) + begin = time.time() - if expected_version is not None: - is_deployed = is_active and current_version == expected_version - else: - is_deployed = is_active + self.logging.info(f"Waiting for function {func_name} to become ACTIVE...") - if not is_deployed: - self.logging.warning( - f"Function {func_name} deployment verification failed: " - f"status={func_details['status']}, version={current_version}, " - f"expected_version={expected_version}" + while True: + get_req = ( + self.function_client.projects().locations().functions().get(name=full_func_name) ) + func_details = get_req.execute() + + status = func_details["status"] + current_version = int(func_details["versionId"]) - return (is_deployed, current_version) + if status == "ACTIVE": + # Check version if specified + if expected_version is not None and current_version != expected_version: + self.logging.warning( + f"Function {func_name} is ACTIVE but version mismatch: " + f"expected {expected_version}, got {current_version}" + ) + # Continue waiting as version might still be updating + else: + self.logging.info(f"Function {func_name} is ACTIVE (version {current_version})") + return current_version + elif status == "DEPLOY_IN_PROGRESS": + self.logging.debug(f"Function {func_name} deployment in 
progress...") + else: + # Unexpected status + self.logging.error(f"Function {func_name} has unexpected status: {status}") + raise RuntimeError(f"Function {func_name} deployment failed with status: {status}") + + if time.time() - begin > timeout: + raise RuntimeError( + f"Timeout waiting for function {func_name} to become ACTIVE. " + f"Current status: {status}" + ) + + time.sleep(2) def package_code( self, @@ -549,20 +571,16 @@ def create_function( }, ) ) - ret = create_req.execute() - self.logging.info(f"Function {func_name} is creating - GCP deployment is started!") + create_req.execute() + self.logging.info(f"Function {func_name} is creating - GCP build&deployment is started!") # Poll build status until completion or failure build_found = self._wait_for_build_and_poll(func_name) if not build_found: raise RuntimeError(f"No build operation found for {func_name}!") - # Verify deployment is complete - is_deployed, _ = self.verify_deployment(func_name) - if not is_deployed: - raise RuntimeError( - f"Function {func_name} build succeeded but deployment verification failed!" - ) + # Wait for deployment to become ACTIVE + self._wait_for_active_status(func_name) allow_unauthenticated_req = ( self.function_client.projects() @@ -652,13 +670,6 @@ def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) if trigger_type == Trigger.TriggerType.HTTP: - # Verify function is deployed (should already be done by create/update) - is_deployed, _ = self.verify_deployment(function.name) - if not is_deployed: - raise RuntimeError( - f"Function {function.name} must be deployed before creating HTTP trigger!" 
- ) - # Get the HTTPS trigger URL location = self.config.region project_name = self.config.project_name @@ -780,13 +791,8 @@ def update_function( "this is unexpected for code updates" ) - # Verify deployment with expected version - is_deployed, current_version = self.verify_deployment(function.name, expected_version) - if not is_deployed: - raise RuntimeError( - f"Failed to publish new function code. " - f"Expected version {expected_version}, current version {current_version}." - ) + # Wait for deployment to become ACTIVE with expected version + self._wait_for_active_status(function.name, expected_version) self.logging.info("Published new function code and configuration.") def _update_envs(self, full_function_name: str, envs: Dict) -> Dict: @@ -912,24 +918,12 @@ def update_function_configuration( self.logging.info(f"Function {function.name} configuration update initiated") - # Verify deployment with expected version - # Retry a few times as version might take a moment to propagate - max_retries = 10 - for retry in range(max_retries): - is_deployed, current_version = self.verify_deployment(function.name, expected_version) - if is_deployed: - break - if retry < max_retries - 1: - time.sleep(2) - - if not is_deployed: - raise RuntimeError( - f"Failed to publish new function configuration. " - f"Expected version {expected_version}, current version {current_version}." 
- ) + # Wait for deployment to become ACTIVE with expected version + # Configuration updates don't trigger builds but still need deployment time + current_version = self._wait_for_active_status(function.name, expected_version, timeout=60) self.logging.info("Published new function configuration.") - return expected_version + return current_version @staticmethod def get_full_function_name(project_name: str, location: str, func_name: str) -> str: From cfd7749666cff8d089f1d4176372d1b736b3446d Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 21:03:22 +0100 Subject: [PATCH 61/82] [gcp] Add docker images for GCP --- config/systems.json | 24 +++++++++++++++++++++- dockerfiles/gcp/java/Dockerfile.build | 29 +++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) create mode 100644 dockerfiles/gcp/java/Dockerfile.build diff --git a/config/systems.json b/config/systems.json index aec44361..45c64459 100644 --- a/config/systems.json +++ b/config/systems.json @@ -282,7 +282,7 @@ "packages": [], "module_packages": { "storage": [ - "google-cloud-storage" + "goo gle-cloud-storage" ], "nosql": [ "google-cloud-datastore" @@ -311,6 +311,28 @@ "uuid": "3.4.0" } } + }, + "java": { + "base_images": { + "x64": { + "11": "us-central1-docker.pkg.dev/serverless-runtimes/google-18-full/runtimes/java11:deprecated-public-image-java11_20260217_11_0_RC00", + "17": "us-central1-docker.pkg.dev/serverless-runtimes/google-22-full/runtimes/java17:java17_20260215_17_0_RC00" + } + }, + "images": [ + "build" + ], + "username": "docker_user", + "deployment": { + "files": [ + "src" + ], + "packages": { + "com.google.cloud.functions:functions-framework-api": "1.1.0", + "com.fasterxml.jackson.core:jackson-databind": "2.17.1" + }, + "module_packages": {} + } } }, "images": { diff --git a/dockerfiles/gcp/java/Dockerfile.build b/dockerfiles/gcp/java/Dockerfile.build new file mode 100644 index 00000000..80ec71b9 --- /dev/null +++ b/dockerfiles/gcp/java/Dockerfile.build @@ 
-0,0 +1,29 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV JAVA_VERSION=${VERSION} +ENV DEBIAN_FRONTEND="noninteractive" + +USER root + +# Install Maven and build tools via apt-get (Debian-based) +RUN apt-get update && apt-get install -y \ + gosu \ + maven \ + unzip \ + zip \ + curl \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Copy shared installer scripts +RUN mkdir -p /sebs/ +COPY dockerfiles/java_installer.sh /sebs/installer.sh +COPY dockerfiles/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh /sebs/installer.sh + +# useradd and groupmod are in /usr/sbin +ENV PATH=/usr/sbin:$PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] From 51d24f1aec3fb1b0c427a77feeb89796b25a3620 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 21:03:50 +0100 Subject: [PATCH 62/82] [dev] Linting --- sebs/gcp/gcp.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py index d43cdd46..a9b7ed54 100644 --- a/sebs/gcp/gcp.py +++ b/sebs/gcp/gcp.py @@ -314,7 +314,7 @@ def _wait_for_build_and_poll( f"projects/{self.config.project_name}/locations/" f"{self.config.region}/builds/{build_id}" ) - self.logging.info(f"Found build {build_id} for function {func_name}") + self.logging.info(f"Found build {build_id} for function {func_name}!") break except HttpError as e: self.logging.debug(f"Error getting function details: {e}") @@ -572,7 +572,9 @@ def create_function( ) ) create_req.execute() - self.logging.info(f"Function {func_name} is creating - GCP build&deployment is started!") + self.logging.info( + f"Function {func_name} is creating - GCP build&deployment is started!" 
+ ) # Poll build status until completion or failure build_found = self._wait_for_build_and_poll(func_name) From b58835854efa44e540f91d979bb47c82c373140c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 21:31:36 +0100 Subject: [PATCH 63/82] [dev] Remove wrong character --- config/systems.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/systems.json b/config/systems.json index 45c64459..bf69f825 100644 --- a/config/systems.json +++ b/config/systems.json @@ -282,7 +282,7 @@ "packages": [], "module_packages": { "storage": [ - "goo gle-cloud-storage" + "google-cloud-storage" ], "nosql": [ "google-cloud-datastore" From b5d62bbb3cb555243eca817533e197d854e1eda9 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 21:45:11 +0100 Subject: [PATCH 64/82] [gcp] Add wrappers for Java --- .../org/serverlessbench/ColdStartTracker.java | 45 +++++++++++ .../org/serverlessbench/FunctionInvoker.java | 41 ++++++++++ .../java/org/serverlessbench/Handler.java | 81 +++++++++++++++++++ 3 files changed, 167 insertions(+) create mode 100644 benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/ColdStartTracker.java create mode 100644 benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java create mode 100644 benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java diff --git a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/ColdStartTracker.java b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/ColdStartTracker.java new file mode 100644 index 00000000..d611d0b7 --- /dev/null +++ b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/ColdStartTracker.java @@ -0,0 +1,45 @@ +package org.serverlessbench; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicBoolean; + +final class ColdStartTracker { + + 
private static final Path MARKER = Path.of("/tmp/cold_run"); + private static String containerId = null; + + private ColdStartTracker() {} + + static boolean isCold() { + if (Files.exists(MARKER)) { + return false; + } + try { + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); + } catch (IOException ignored) { + // best-effort marker write + } + return true; + } + + static String getContainerId() { + if (containerId == null) { + try { + if (Files.exists(MARKER)) { + containerId = Files.readString(MARKER, StandardCharsets.UTF_8); + } else { + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.writeString(MARKER, containerId, StandardCharsets.UTF_8); + } + } catch (IOException e) { + containerId = UUID.randomUUID().toString().substring(0, 8); + } + } + return containerId; + } +} diff --git a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java new file mode 100644 index 00000000..9c502f86 --- /dev/null +++ b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java @@ -0,0 +1,41 @@ +package org.serverlessbench; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.Map; + +final class FunctionInvoker { + + private static final String DEFAULT_CLASS = "function.Function"; + private static final String DEFAULT_METHOD = "handler"; + + private FunctionInvoker() {} + + static Map invoke(Map input) { + try { + Class fnClass = Class.forName(DEFAULT_CLASS); + Object instance = fnClass.getDeclaredConstructor().newInstance(); + Method method = fnClass.getMethod(DEFAULT_METHOD, Map.class); + Object result = method.invoke(instance, input); + if (result instanceof Map) { + @SuppressWarnings("unchecked") + Map casted = (Map) result; + return casted; + } + } catch 
(ClassNotFoundException e) { + throw new RuntimeException("Function implementation not found"); + } catch (NoSuchMethodException e) { + throw new RuntimeException("Function.handler(Map) missing"); + } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { + throw new RuntimeException("Failed to invoke function: " + e.getMessage()); + } + throw new RuntimeException("Function returned unsupported type"); + } + + private static Map defaultResponse(String message) { + Map out = new HashMap<>(); + out.put("output", message); + return out; + } +} diff --git a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java new file mode 100644 index 00000000..38e77cf2 --- /dev/null +++ b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java @@ -0,0 +1,81 @@ +package org.serverlessbench; + +import com.google.cloud.functions.HttpFunction; +import com.google.cloud.functions.HttpRequest; +import com.google.cloud.functions.HttpResponse; +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.io.BufferedWriter; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +public class Handler implements HttpFunction { + + private static final ObjectMapper MAPPER = new ObjectMapper(); + + @Override + public void service(HttpRequest request, HttpResponse response) + throws IOException { + + long beginMs = System.currentTimeMillis(); + long beginNs = System.nanoTime(); + + // Normalize request from GCP HTTP format + Map normalized = normalizeRequest(request); + + Map result = FunctionInvoker.invoke(normalized); + + long endNs = System.nanoTime(); + long endMs = System.currentTimeMillis(); + + // Format timestamps as "seconds.microseconds" (SeBS standard) + String beginStr = formatTimestamp(beginMs, beginNs); + String endStr = formatTimestamp(endMs, endNs); + + // Get 
cold start info + String containerId = ColdStartTracker.getContainerId(); + String coldStartVar = System.getenv("cold_start"); + if (coldStartVar == null) { + coldStartVar = ""; + } + + Map body = new HashMap<>(); + body.put("begin", beginStr); + body.put("end", endStr); + body.put("results_time", 0); + body.put("result", result); + body.put("is_cold", ColdStartTracker.isCold()); + body.put("container_id", containerId); + body.put("cold_start_var", coldStartVar); + body.put("request_id", request.getFirstHeader("Function-Execution-Id").orElse("")); + + // Write JSON response + response.setContentType("application/json"); + response.setStatusCode(200); + BufferedWriter writer = response.getWriter(); + writer.write(MAPPER.writeValueAsString(body)); + } + + private String formatTimestamp(long epochMillis, long nanoTime) { + long seconds = epochMillis / 1000; + long microseconds = (nanoTime / 1000) % 1_000_000; + return String.format("%d.%06d", seconds, microseconds); + } + + private Map normalizeRequest(HttpRequest request) + throws IOException { + + try { + Map map = MAPPER.readValue(request.getReader(), Map.class); + if (map != null) { + return map; + } + } catch (IOException e) { + // fall through to query parameters + } + + return new HashMap<>(request.getQueryParameters()); + } +} From 52287f779dcb34396aa57c91a46096b619542b92 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 22:23:50 +0100 Subject: [PATCH 65/82] [whisk] Remove merge artifact --- sebs/openwhisk/config.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 26fe9acf..f471a0cf 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -337,7 +337,6 @@ def initialize(cfg: Config, dct: Dict[str, Any]) -> None: """ config = cast(OpenWhiskConfig, cfg) - config._region = dct["region"] config.shutdownStorage = dct["shutdownStorage"] config.removeCluster = dct["removeCluster"] From 4c9a83fb07379b4eaa01ed35efb0765ce2de8dcd Mon 
Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 23:40:56 +0100 Subject: [PATCH 66/82] [whisk] Add baseline Dockerfile for Java actions --- .../openwhisk/java/Dockerfile.function | 30 +++++++++++++++++-- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/dockerfiles/openwhisk/java/Dockerfile.function b/dockerfiles/openwhisk/java/Dockerfile.function index b72ceb15..c6247ec2 100644 --- a/dockerfiles/openwhisk/java/Dockerfile.function +++ b/dockerfiles/openwhisk/java/Dockerfile.function @@ -1,8 +1,32 @@ ARG BASE_IMAGE -FROM $BASE_IMAGE -COPY . /function/ +FROM $BASE_IMAGE as builder +ARG VERSION +ENV JAVA_VERSION=${VERSION} +ARG TARGET_ARCHITECTURE + +# Install Maven 3.x (maven package may be old, install from Apache directly) +RUN curl -fsSL https://archive.apache.org/dist/maven/maven-3/3.9.6/binaries/apache-maven-3.9.6-bin.tar.gz | tar -xz -C /opt && \ + ln -s /opt/apache-maven-3.9.6 /opt/maven && \ + ln -s /opt/maven/bin/mvn /usr/local/bin/mvn +ENV PATH=/opt/maven/bin:$PATH -# RUN apt-get update && apt-get install -y maven +COPY . function/ # # Check if pom.xml exists before running Maven # RUN if [ -f ./pom.xml ]; then mvn clean install; else echo "pom.xml not found, aborting build." && exit 1; fi +# Ensure packaged jar is present for the Lambda base image +# function.jar should exist (created by java_installer.sh), but if not, copy from target/ +# Prefer the shaded/fat JAR (exclude "original" JARs created by maven-shade-plugin) +RUN POM_PATH=$(find /function -maxdepth 3 -name "pom.xml" | head -n1) && \ + if [ -n "${POM_PATH}" ]; then \ + mvn -f ${POM_PATH} -DskipTests clean package && \ + POM_DIR=$(dirname "${POM_PATH}") && \ + cp "${POM_DIR}"/target/function.jar /function/function.jar;\ + else \ + echo "No pom.xml found!" 
&& \ + exit 1; \ + fi + +FROM $BASE_IMAGE + +COPY --from=builder /function/function.jar /function/ From eb9c88d657fb8707571f32371f45b35b43069a6e Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 23:42:39 +0100 Subject: [PATCH 67/82] [whisk] Update definition of actions and dependencies --- config/systems.json | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/config/systems.json b/config/systems.json index bf69f825..22731770 100644 --- a/config/systems.json +++ b/config/systems.json @@ -402,7 +402,9 @@ }, "java": { "base_images": { - "8": "openwhisk/java8action" + "x64": { + "8": "openwhisk/java8action:1.20.0" + } }, "images": [ "function" @@ -410,11 +412,10 @@ "username": "docker_user", "deployment": { "files": [ - "Main.java", - "Storage.java" + "src" ], "packages": { - "minio": "8.5.9" + "com.google.code.gson:gson": "2.8.5" } } } From 87a680cbdc37c196f31deeb73ac393044a9b0f16 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 23:43:03 +0100 Subject: [PATCH 68/82] [whisk] Add baseline implementation of wrappers --- benchmarks/wrappers/openwhisk/java/Main.java | 55 ---------------- .../wrappers/openwhisk/java/Storage.java | 0 .../java/org/serverlessbench/Handler.java | 66 +++++++++++++++++++ 3 files changed, 66 insertions(+), 55 deletions(-) delete mode 100644 benchmarks/wrappers/openwhisk/java/Main.java delete mode 100644 benchmarks/wrappers/openwhisk/java/Storage.java create mode 100644 benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java diff --git a/benchmarks/wrappers/openwhisk/java/Main.java b/benchmarks/wrappers/openwhisk/java/Main.java deleted file mode 100644 index 59a33ee3..00000000 --- a/benchmarks/wrappers/openwhisk/java/Main.java +++ /dev/null @@ -1,55 +0,0 @@ -import faas.App; -import com.google.gson.JsonObject; -import java.time.Instant; -import java.time.Duration; -import java.io.File; -import java.io.IOException; - - -public class Main { - public static JsonObject 
main(JsonObject args) { - - App function = new App(); - - long start_nano = System.nanoTime(); - - Instant begin = Instant.now(); - JsonObject result = function.handler(args); - Instant end = Instant.now(); - - long end_nano = System.nanoTime(); - - // long computeTime = Duration.between(begin, end).toNanos() / 1000; // Convert nanoseconds to microseconds - - long computeTime = end_nano - start_nano; - boolean isCold = false; - String fileName = "/tmp/cold_run"; - - File file = new File(fileName); - if (!file.exists()) { - isCold = true; - try { - file.createNewFile(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - // Convert to Unix timestamp in seconds.microseconds - String formattedBegin = String.format("%d.%06d", begin.getEpochSecond(), begin.getNano() / 1000); // Convert nanoseconds to microseconds - String formattedEnd = String.format("%d.%06d", end.getEpochSecond(), end.getNano() / 1000); - - String requestId = System.getenv("__OW_ACTIVATION_ID"); - - JsonObject jsonResult = new JsonObject(); - jsonResult.addProperty("begin", formattedBegin); - jsonResult.addProperty("end", formattedEnd); - jsonResult.addProperty("request_id", requestId); - jsonResult.addProperty("compute_time", computeTime); - jsonResult.addProperty("is_cold", isCold); - jsonResult.addProperty("result", result.toString()); - return jsonResult; - } - -} - diff --git a/benchmarks/wrappers/openwhisk/java/Storage.java b/benchmarks/wrappers/openwhisk/java/Storage.java deleted file mode 100644 index e69de29b..00000000 diff --git a/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java new file mode 100644 index 00000000..00d32372 --- /dev/null +++ b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java @@ -0,0 +1,66 @@ +package org.serverlessbench; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import 
com.google.gson.reflect.TypeToken; +import java.lang.reflect.Type; +import java.util.Map; +import java.time.Instant; +import java.time.Duration; +import java.io.File; +import java.io.IOException; + +import function.Function; + + +public class Handler { + + static Type mapType = new TypeToken>(){}.getType(); + static Gson gson = new Gson(); + + public static JsonObject main(JsonObject args) { + + Function function = new Function(); + + Instant begin = Instant.now(); + Map result = function.handler(gson.fromJson(args, mapType)); + Instant end = Instant.now(); + + boolean isCold = false; + String fileName = "/tmp/cold_run"; + + File file = new File(fileName); + if (!file.exists()) { + isCold = true; + try { + file.createNewFile(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + // Convert to Unix timestamp in seconds.microseconds + String formattedBegin = String.format("%d.%06d", begin.getEpochSecond(), begin.getNano() / 1000); + String formattedEnd = String.format("%d.%06d", end.getEpochSecond(), end.getNano() / 1000); + + String requestId = System.getenv("__OW_ACTIVATION_ID"); + if (requestId == null) { + requestId = ""; + } + + // Create result wrapper matching Python format + JsonObject logData = new JsonObject(); + logData.add("result", gson.toJsonTree(result).getAsJsonObject()); + + JsonObject jsonResult = new JsonObject(); + jsonResult.addProperty("begin", formattedBegin); + jsonResult.addProperty("end", formattedEnd); + jsonResult.addProperty("request_id", requestId); + jsonResult.addProperty("results_time", 0); + jsonResult.addProperty("is_cold", isCold); + jsonResult.add("result", logData); + return jsonResult; + } + +} + From 8f6fea41bfa116e490fb4e4039e7a0bf83c239d8 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 23:43:17 +0100 Subject: [PATCH 69/82] [whisk] Update configuration --- config/example.json | 2 +- config/openwhisk.json | 13 ++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git 
a/config/example.json b/config/example.json index c19023e5..a87fff40 100644 --- a/config/example.json +++ b/config/example.json @@ -74,7 +74,7 @@ "wskBypassSecurity": "true", "wskExec": "wsk", "experimentalManifest": false, - "dockerhub_repository": null, + "dockerhubRepository": null, "docker_registry": { "registry": "", "username": "", diff --git a/config/openwhisk.json b/config/openwhisk.json index c41b4966..08dfaffa 100644 --- a/config/openwhisk.json +++ b/config/openwhisk.json @@ -11,10 +11,13 @@ }, "deployment": { "name": "openwhisk", - "shutdownStorage": false, - "removeCluster": false, - "wskBypassSecurity": "true", - "wskExec": "wsk", - "experimentalManifest": "false" + "openwhisk": { + "shutdownStorage": false, + "removeCluster": false, + "wskBypassSecurity": "true", + "wskExec": "wsk", + "experimentalManifest": "false", + "dockerhubRepository": "" + } } } From e4f7c50f43e4042f94cb487b83aca151b5d120ee Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 5 Mar 2026 23:49:09 +0100 Subject: [PATCH 70/82] [whisk] Support Java-based actions --- docs/platforms.md | 10 +++- sebs/openwhisk/openwhisk.py | 110 +++++++++++++++++++++++++++--------- 2 files changed, 93 insertions(+), 27 deletions(-) diff --git a/docs/platforms.md b/docs/platforms.md index 3e32f6c6..3ed915cc 100644 --- a/docs/platforms.md +++ b/docs/platforms.md @@ -234,6 +234,15 @@ or a Docker image with all dependencies preinstalled. However, OpenWhisk has a very low code package size limit of only 48 megabytes. So, to circumvent this limit, we deploy functions using pre-built Docker images. +> [!NOTE] +> On Python and Node.js, we create a full Docker image and upload the main handler +file only to OpenWhisk, as this is required for actions. +This is not possible on Java, as we need to compile the code into JAR. +To avoid extract build image, we build the function image, extract the function JAR, +and upload it with the action. 
In future, if we want to create heavy JARs with complex +dependencies, we might need to switch to full image deployment on Java as well. + + **Important**: OpenWhisk requires that all Docker images are available in the registry, even if they have been cached on a system serving OpenWhisk functions. @@ -248,7 +257,6 @@ repository on Docker Hub requires permissions. To use a different Docker Hub repository, change the key `['general']['docker_repository']` in `config/systems.json`. - Alternatively, OpenWhisk users can configure the FaaS platform to use a custom and private Docker registry and push new images there. A local Docker registry can speed up development when debugging a new function. diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 6ebfc454..a3911a3d 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -205,48 +205,101 @@ def package_code( """ Package benchmark code for OpenWhisk deployment. - Creates a a ZIP archive containing the benchmark code. + Creates a ZIP archive containing the benchmark code. The ZIP archive is required for OpenWhisk function registration even when using Docker-based deployment. It contains only the main handlers - (`__main__.py` or `index.js`). The Docker image URI is returned, - which will be used when creating the action. + (`__main__.py` or `index.js`). + + For Java, extracts the JAR from the built container image - this a + fix since we need to provide it as argument to OpenWhisk action, + but we do not want to add extra builder image. 
Args: directory: Path to the benchmark code directory - language: Programming language (e.g., 'python', 'nodejs') - language_version: Language version (e.g., '3.8', '14') + language: Programming language (e.g., 'python', 'nodejs', 'java') + language_version: Language version (e.g., '3.8', '14', '17') architecture: Target architecture (e.g., 'x86_64') benchmark: Benchmark name is_cached: Whether Docker image is already cached Returns: Tuple containing: - - Path to created ZIP archive - - Size of ZIP archive in bytes + - Path to created ZIP archive (or JAR for Java) + - Size of archive in bytes """ - if language != Language.JAVA: + if language == Language.JAVA: + # For Java, we need to extract the JAR from the built container + # Get the container image URI that was just built + _, _, _, image_uri = self._container_client.registry_name( + benchmark, language.value, language_version, architecture + ) + + self.logging.info(f"Extracting JAR from container image {image_uri}") + + # Run container to get the JAR file + jar_path = os.path.join(directory, "function.jar") + try: + # Create and run a temporary container + container = self.docker_client.containers.create(image_uri) + + # Copy JAR from container to build directory + # Docker API expects a path to a tar stream + import tarfile + import io + + bits, _ = container.get_archive("/function/function.jar") + + # Extract tar stream to get the file + tar_stream = io.BytesIO() + for chunk in bits: + tar_stream.write(chunk) + tar_stream.seek(0) + + with tarfile.open(fileobj=tar_stream) as tar: + # Extract function.jar from the tar + jar_member = tar.getmember("function.jar") + jar_file = tar.extractfile(jar_member) + if jar_file is None: + raise RuntimeError("Could not extract function.jar from container!") + + # Write to destination + with open(jar_path, "wb") as f: + f.write(jar_file.read()) + + # Clean up container + container.remove() + + self.logging.info(f"Extracted function JAR to {jar_path}") + bytes_size = 
os.path.getsize(jar_path) + self.logging.info(f"JAR size {bytes_size / 1024.0 / 1024.0:.2f} MB") + + return jar_path, bytes_size + + except Exception as e: + self.logging.error(f"Failed to extract JAR from container: {e}") + raise RuntimeError(f"Failed to extract JAR from container {image_uri}: {e}") + + else: + # For Python and Node.js, create a minimal ZIP with handlers # We deploy Minio config in code package since this depends on local - # deployment - it cannnot be a part of Docker image - # FIXME: why No file is needed for Java? + # deployment - it cannot be a part of Docker image CONFIG_FILES = { - "python": ["__main__.py"], - "nodejs": ["index.js"], + Language.PYTHON: ["__main__.py"], + Language.NODEJS: ["index.js"], } package_config = CONFIG_FILES[language] - else: - package_config = [] - benchmark_archive = os.path.join(directory, f"{benchmark}.zip") - subprocess.run( - ["zip", benchmark_archive] + package_config, - stdout=subprocess.DEVNULL, - cwd=directory, - ) - self.logging.info(f"Created {benchmark_archive} archive") - bytes_size = os.path.getsize(benchmark_archive) - self.logging.info("Zip archive size {:2f} MB".format(bytes_size / 1024.0 / 1024.0)) - return benchmark_archive, bytes_size + benchmark_archive = os.path.join(directory, f"{benchmark}.zip") + subprocess.run( + ["zip", benchmark_archive] + package_config, + stdout=subprocess.DEVNULL, + cwd=directory, + ) + self.logging.info(f"Created {benchmark_archive} archive") + bytes_size = os.path.getsize(benchmark_archive) + self.logging.info("Zip archive size {:2f} MB".format(bytes_size / 1024.0 / 1024.0)) + return benchmark_archive, bytes_size def finalize_container_build( self, @@ -381,6 +434,11 @@ def create_function( repository=self.config.dockerhub_repository, ) + code_location = code_package.code_location + if code_location is None: + raise RuntimeError( + "Code location must be set for OpenWhisk action!" 
+ ) from None run_arguments = [ *self.get_wsk_cmd(), "action", @@ -395,10 +453,10 @@ def create_function( "--timeout", str(code_package.benchmark_config.timeout * 1000), *self.storage_arguments(code_package), - code_package.code_location, + code_location, ] if code_package.language == Language.JAVA: - run_arguments.extend(["--main", "Main"]) + run_arguments.extend(["--main", "org.serverlessbench.Handler"]) if code_package.code_location is None: raise RuntimeError( From 3ae88eb8adaf6a60a7ac9c487b97c8400b5b4b00 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 00:28:42 +0100 Subject: [PATCH 71/82] [whisk] Add dedicated cold start tracker --- .../org/serverlessbench/ColdStartTracker.java | 46 +++++++++++++++++++ .../java/org/serverlessbench/Handler.java | 17 ++----- 2 files changed, 50 insertions(+), 13 deletions(-) create mode 100644 benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/ColdStartTracker.java diff --git a/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/ColdStartTracker.java b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/ColdStartTracker.java new file mode 100644 index 00000000..4a19c0ff --- /dev/null +++ b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/ColdStartTracker.java @@ -0,0 +1,46 @@ +package org.serverlessbench; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicBoolean; + +final class ColdStartTracker { + + private static final Path MARKER = Paths.get("/tmp/cold_run"); + private static String containerId = null; + + private ColdStartTracker() {} + + static boolean isCold() { + if (Files.exists(MARKER)) { + return false; + } + try { + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.write(MARKER, containerId.getBytes(StandardCharsets.UTF_8)); + } catch 
(IOException ignored) { + // best-effort marker write + } + return true; + } + + static String getContainerId() { + if (containerId == null) { + try { + if (Files.exists(MARKER)) { + containerId = new String(Files.readAllBytes(MARKER), StandardCharsets.UTF_8); + } else { + containerId = UUID.randomUUID().toString().substring(0, 8); + Files.write(MARKER, containerId.getBytes(StandardCharsets.UTF_8)); + } + } catch (IOException e) { + containerId = UUID.randomUUID().toString().substring(0, 8); + } + } + return containerId; + } +} diff --git a/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java index 00d32372..2b7fee57 100644 --- a/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java @@ -26,18 +26,7 @@ public static JsonObject main(JsonObject args) { Map result = function.handler(gson.fromJson(args, mapType)); Instant end = Instant.now(); - boolean isCold = false; - String fileName = "/tmp/cold_run"; - - File file = new File(fileName); - if (!file.exists()) { - isCold = true; - try { - file.createNewFile(); - } catch (IOException e) { - e.printStackTrace(); - } - } + String containerId = ColdStartTracker.getContainerId(); // Convert to Unix timestamp in seconds.microseconds String formattedBegin = String.format("%d.%06d", begin.getEpochSecond(), begin.getNano() / 1000); @@ -57,8 +46,10 @@ public static JsonObject main(JsonObject args) { jsonResult.addProperty("end", formattedEnd); jsonResult.addProperty("request_id", requestId); jsonResult.addProperty("results_time", 0); - jsonResult.addProperty("is_cold", isCold); + jsonResult.addProperty("is_cold", ColdStartTracker.isCold()); + jsonResult.addProperty("container_id", containerId); jsonResult.add("result", logData); + return jsonResult; } From a580eae79ca9368c2fdada5a2e161894d11e7cd9 Mon Sep 
17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 00:29:03 +0100 Subject: [PATCH 72/82] [whisk] Make sure to pass class name when updating action --- sebs/openwhisk/openwhisk.py | 35 +++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index a3911a3d..932cad70 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -537,23 +537,26 @@ def update_function( ) try: + run_arguments = [ + *self.get_wsk_cmd(), + "action", + "update", + function.name, + "--web", + "true", + "--docker", + docker_image, + "--memory", + str(code_package.benchmark_config.memory), + "--timeout", + str(code_package.benchmark_config.timeout * 1000), + *self.storage_arguments(code_package), + code_package.code_location, + ] + if code_package.language == Language.JAVA: + run_arguments.extend(["--main", "org.serverlessbench.Handler"]) subprocess.run( - [ - *self.get_wsk_cmd(), - "action", - "update", - function.name, - "--web", - "true", - "--docker", - docker_image, - "--memory", - str(code_package.benchmark_config.memory), - "--timeout", - str(code_package.benchmark_config.timeout * 1000), - *self.storage_arguments(code_package), - code_package.code_location, - ], + run_arguments, stderr=subprocess.PIPE, stdout=subprocess.PIPE, check=True, From a25d07919997ec18218ffd9aa720948fb941cad5 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 00:37:48 +0100 Subject: [PATCH 73/82] [gcp] Add OpenWhisk Java to regression --- sebs/regression.py | 56 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/sebs/regression.py b/sebs/regression.py index 46b25ba5..27fe0109 100644 --- a/sebs/regression.py +++ b/sebs/regression.py @@ -809,6 +809,60 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): return deployment_client +class GCPTestSequenceJava( + unittest.TestCase, + metaclass=TestSequenceMeta, + 
benchmarks=benchmarks_java, + architectures=architectures_gcp, + deployments=deployments_gcp, + deployment_name="gcp", + triggers=[Trigger.TriggerType.HTTP], +): + """Test suite for Java benchmarks on Google Cloud Functions. + + Attributes: + benchmarks: List of Java benchmarks to test + architectures: List of GCP architectures to test (x64) + deployments: List of deployment types to test (package) + deployment_name: Cloud provider name ("gcp") + triggers: List of trigger types to test (HTTP) + """ + + def get_deployment(self, benchmark_name, architecture, deployment_type): + """Get a GCP deployment client for the specified configuration. + + Args: + benchmark_name: Name of the benchmark to deploy + architecture: Architecture to deploy on (x64) + deployment_type: Deployment type (package) + + Returns: + An initialized Google Cloud Functions deployment client + + Raises: + AssertionError: If cloud_config is not set + """ + deployment_name = "gcp" + assert cloud_config, "Cloud configuration is required" + + # Create a copy of the config and set architecture and deployment type + config_copy = copy.deepcopy(cloud_config) + config_copy["experiments"]["architecture"] = architecture + config_copy["experiments"]["container_deployment"] = deployment_type == "container" + + # Create log file name based on test parameters + f = f"regression_{deployment_name}_{benchmark_name}_{architecture}_{deployment_type}.log" + deployment_client = self.client.get_deployment( + config_copy, + logging_filename=os.path.join(self.client.output_dir, f), + ) + + # Synchronize resource initialization with a lock + with GCPTestSequenceJava.lock: + deployment_client.initialize(resource_prefix="regr") + return deployment_client + + class OpenWhiskTestSequencePython( unittest.TestCase, metaclass=TestSequenceMeta, @@ -1155,6 +1209,8 @@ def regression_suite( suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(GCPTestSequencePython)) elif language == "nodejs": 
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(GCPTestSequenceNodejs)) + elif language == "java": + suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(GCPTestSequenceJava)) # Add Azure tests if requested if "azure" in providers: From 6cdb19d81d970a602d909c1aeafc70d024ff59d6 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 00:51:11 +0100 Subject: [PATCH 74/82] [docs] Update list of contributors --- CHANGELOG.md | 3 +++ README.md | 2 ++ 2 files changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e9dc87eb..eb9231f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ - Dynamic dependency resolution with CMake generation - Support for Boost, OpenCV, igraph, PyTorch, hiredis libraries - C++ implementations: 010.sleep, 210.thumbnailer, 501.graph-pagerank, 503.graph-bfs, 411.image-recognition +* **Java benchmarks**: initial support for Java on all four platforms (#223), including benchmark **110.dynamic-html**. * **Python**: Updated support for Python 3.8, 3.9, 3.10, 3.11, 3.12 * **Node.js**: Updated support for Node.js 14, 16, 18, 20 @@ -104,6 +105,8 @@ This release includes contributions from: * @lawrence910426 - Colored CLI output (#141) * @alevy - Documentation improvements (#139) * @skehrli - Local memory measurements (#101) +* @mahlashrifi - Java benchmarks support (#223) +* @xSurus - improvements and extensions to Java benchmarks (#223) * And many others who contributed bug reports, testing, and feedback! ## [1.1.0](https://github.com/spcl/serverless-benchmarks/compare/v1.0...v1.1) (2022-05-30) diff --git a/README.md b/README.md index 8a896c22..16693ef1 100644 --- a/README.md +++ b/README.md @@ -187,3 +187,5 @@ To verify the correctness of installation, you can use [our regression testing]( * [Prajin Khadka](https://github.com/prajinkhadka) - contributed new language versions, container support, and ARM builds. 
* [Horia Mercan](https://github.com/HoriaMercan) - contributed new benchmarks in C++. * [Dillon Elste (ETH Zurich)](https://github.com/DJAntivenom) - bugfixing in C++. +* [Mahla Sharifi](https://github.com/mahlashrifi) - contributed support for Java benchmarks. +* [Alexander Schlieper (ETH Zurich)](https://github.com/xSurus) - improved support for Java benchmarks. From 499153f920e58d6a946e02c7dbc00668fa059bc8 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 00:57:47 +0100 Subject: [PATCH 75/82] [java] Replace dynamic loading of function class with a statically known path We do not need such a flexibility since the class name of every benchmark is the same --- .../org/serverlessbench/FunctionInvoker.java | 41 ------------------- .../java/org/serverlessbench/Handler.java | 4 +- .../org/serverlessbench/FunctionInvoker.java | 41 ------------------- .../java/org/serverlessbench/Handler.java | 4 +- .../org/serverlessbench/FunctionInvoker.java | 41 ------------------- .../java/org/serverlessbench/Handler.java | 4 +- 6 files changed, 9 insertions(+), 126 deletions(-) delete mode 100644 benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java delete mode 100644 benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java delete mode 100644 benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java deleted file mode 100644 index 9c502f86..00000000 --- a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/FunctionInvoker.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.serverlessbench; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.HashMap; -import java.util.Map; - -final class FunctionInvoker { - - private static final String 
DEFAULT_CLASS = "function.Function"; - private static final String DEFAULT_METHOD = "handler"; - - private FunctionInvoker() {} - - static Map invoke(Map input) { - try { - Class fnClass = Class.forName(DEFAULT_CLASS); - Object instance = fnClass.getDeclaredConstructor().newInstance(); - Method method = fnClass.getMethod(DEFAULT_METHOD, Map.class); - Object result = method.invoke(instance, input); - if (result instanceof Map) { - @SuppressWarnings("unchecked") - Map casted = (Map) result; - return casted; - } - } catch (ClassNotFoundException e) { - throw new RuntimeException("Function implementation not found"); - } catch (NoSuchMethodException e) { - throw new RuntimeException("Function.handler(Map) missing"); - } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { - throw new RuntimeException("Failed to invoke function: " + e.getMessage()); - } - throw new RuntimeException("Function returned unsupported type"); - } - - private static Map defaultResponse(String message) { - Map out = new HashMap<>(); - out.put("output", message); - return out; - } -} diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java index 3cf9b84a..c03c8e72 100644 --- a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java @@ -3,6 +3,7 @@ import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import com.fasterxml.jackson.databind.ObjectMapper; +import function.Function; import java.util.HashMap; import java.util.Map; @@ -16,7 +17,8 @@ public Map handleRequest(Map event, Context cont long beginMs = System.currentTimeMillis(); long beginNs = System.nanoTime(); Map normalized = normalize(event); - Map result = FunctionInvoker.invoke(normalized); + Function function = new Function(); + Map 
result = function.handler(normalized); long endNs = System.nanoTime(); long endMs = System.currentTimeMillis(); diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java deleted file mode 100644 index 9c502f86..00000000 --- a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/FunctionInvoker.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.serverlessbench; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.HashMap; -import java.util.Map; - -final class FunctionInvoker { - - private static final String DEFAULT_CLASS = "function.Function"; - private static final String DEFAULT_METHOD = "handler"; - - private FunctionInvoker() {} - - static Map invoke(Map input) { - try { - Class fnClass = Class.forName(DEFAULT_CLASS); - Object instance = fnClass.getDeclaredConstructor().newInstance(); - Method method = fnClass.getMethod(DEFAULT_METHOD, Map.class); - Object result = method.invoke(instance, input); - if (result instanceof Map) { - @SuppressWarnings("unchecked") - Map casted = (Map) result; - return casted; - } - } catch (ClassNotFoundException e) { - throw new RuntimeException("Function implementation not found"); - } catch (NoSuchMethodException e) { - throw new RuntimeException("Function.handler(Map) missing"); - } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { - throw new RuntimeException("Failed to invoke function: " + e.getMessage()); - } - throw new RuntimeException("Function returned unsupported type"); - } - - private static Map defaultResponse(String message) { - Map out = new HashMap<>(); - out.put("output", message); - return out; - } -} diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java index 
5484b6ff..208fb179 100644 --- a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java @@ -5,6 +5,7 @@ import com.microsoft.azure.functions.annotation.AuthorizationLevel; import com.microsoft.azure.functions.annotation.FunctionName; import com.microsoft.azure.functions.annotation.HttpTrigger; +import function.Function; import java.io.IOException; import java.util.HashMap; @@ -29,7 +30,8 @@ public HttpResponseMessage handleRequest( long beginMs = System.currentTimeMillis(); long beginNs = System.nanoTime(); Map normalized = normalizeRequest(request); - Map result = FunctionInvoker.invoke(normalized); + Function function = new Function(); + Map result = function.handler(normalized); long endNs = System.nanoTime(); long endMs = System.currentTimeMillis(); diff --git a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java deleted file mode 100644 index 9c502f86..00000000 --- a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/FunctionInvoker.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.serverlessbench; - -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.HashMap; -import java.util.Map; - -final class FunctionInvoker { - - private static final String DEFAULT_CLASS = "function.Function"; - private static final String DEFAULT_METHOD = "handler"; - - private FunctionInvoker() {} - - static Map invoke(Map input) { - try { - Class fnClass = Class.forName(DEFAULT_CLASS); - Object instance = fnClass.getDeclaredConstructor().newInstance(); - Method method = fnClass.getMethod(DEFAULT_METHOD, Map.class); - Object result = method.invoke(instance, input); - if (result instanceof Map) { - @SuppressWarnings("unchecked") - Map casted = (Map) result; - return casted; - } - } catch 
(ClassNotFoundException e) { - throw new RuntimeException("Function implementation not found"); - } catch (NoSuchMethodException e) { - throw new RuntimeException("Function.handler(Map) missing"); - } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { - throw new RuntimeException("Failed to invoke function: " + e.getMessage()); - } - throw new RuntimeException("Function returned unsupported type"); - } - - private static Map defaultResponse(String message) { - Map out = new HashMap<>(); - out.put("output", message); - return out; - } -} diff --git a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java index 38e77cf2..adc0d0af 100644 --- a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java @@ -4,6 +4,7 @@ import com.google.cloud.functions.HttpRequest; import com.google.cloud.functions.HttpResponse; import com.fasterxml.jackson.databind.ObjectMapper; +import function.Function; import java.io.BufferedWriter; import java.io.IOException; @@ -25,7 +26,8 @@ public void service(HttpRequest request, HttpResponse response) // Normalize request from GCP HTTP format Map normalized = normalizeRequest(request); - Map result = FunctionInvoker.invoke(normalized); + Function function = new Function(); + Map result = function.handler(normalized); long endNs = System.nanoTime(); long endMs = System.currentTimeMillis(); From 20bb209d7539716dcf528ba83048eded48df39e3 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 01:09:58 +0100 Subject: [PATCH 76/82] [dev] Update mypy ignores --- .mypy.ini | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.mypy.ini b/.mypy.ini index e202650e..c1e1e295 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -36,12 +36,17 @@ ignore_missing_imports = True [mypy-google.cloud.storage] ignore_missing_imports 
= True +[mypy-google.cloud.devtools] +ignore_missing_imports = True + [mypy-google.api_core] ignore_missing_imports = True [mypy-googleapiclient.discovery] ignore_missing_imports = True +[mypy-googleapiclient.errors] +ignore_missing_imports = True [mypy-googleapiclient.errors] ignore_missing_imports = True From d41785e1eb7983dd0099affe4124dcc72b71a202 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 01:21:15 +0100 Subject: [PATCH 77/82] [system] Update examples --- config/cpp.json | 1 + config/java.json | 95 ++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 96 insertions(+) create mode 100644 config/java.json diff --git a/config/cpp.json b/config/cpp.json index 838e724c..bb41a2f9 100644 --- a/config/cpp.json +++ b/config/cpp.json @@ -82,6 +82,7 @@ "wskBypassSecurity": "true", "wskExec": "wsk", "experimentalManifest": false, + "dockerhubRepository": null, "docker_registry": { "registry": "", "username": "", diff --git a/config/java.json b/config/java.json new file mode 100644 index 00000000..73c3fd97 --- /dev/null +++ b/config/java.json @@ -0,0 +1,95 @@ +{ + "experiments": { + "deployment": "aws", + "update_code": false, + "update_storage": false, + "download_results": false, + "architecture": "x64", + "container_deployment": false, + "runtime": { + "language": "java", + "version": "11" + }, + "type": "invocation-overhead", + "perf-cost": { + "benchmark": "110.dynamic-html", + "experiments": ["cold", "warm", "burst", "sequential"], + "input-size": "test", + "repetitions": 50, + "concurrent-invocations": 50, + "memory-sizes": [128, 256] + }, + "network-ping-pong": { + "invocations": 50, + "repetitions": 1000, + "threads": 1 + }, + "invocation-overhead": { + "repetitions": 5, + "N": 20, + "type": "payload", + "payload_begin": 1024, + "payload_end": 6251000, + "payload_points": 20, + "code_begin": 1048576, + "code_end": 261619712, + "code_points": 20 + }, + "eviction-model": { + "invocations": 1, + "function_copy_idx": 0, + 
"repetitions": 5, + "sleep": 1 + } + }, + "deployment": { + "name": "aws", + "aws": { + "region": "us-east-1", + "lambda-role": "" + }, + "azure": { + "region": "westeurope" + }, + "gcp": { + "region": "europe-west1", + "project_name": "", + "credentials": "" + }, + "local": { + "storage": { + "address": "", + "mapped_port": -1, + "access_key": "", + "secret_key": "", + "instance_id": "", + "input_buckets": [], + "output_buckets": [], + "type": "minio" + } + }, + "openwhisk": { + "shutdownStorage": false, + "removeCluster": false, + "wskBypassSecurity": "true", + "wskExec": "wsk", + "experimentalManifest": false, + "dockerhubRepository": null, + "docker_registry": { + "registry": "", + "username": "", + "password": "" + }, + "storage": { + "address": "", + "mapped_port": -1, + "access_key": "", + "secret_key": "", + "instance_id": "", + "input_buckets": [], + "output_buckets": [], + "type": "minio" + } + } + } +} From 658d4eaf33b8728b657a4f64d70313d1540127b6 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 01:25:02 +0100 Subject: [PATCH 78/82] [docs] Minor fixes --- docs/platforms.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/platforms.md b/docs/platforms.md index 3ed915cc..9874ca78 100644 --- a/docs/platforms.md +++ b/docs/platforms.md @@ -130,7 +130,7 @@ or in the JSON input configuration: > The tool assumes there is only one subscription active on the account. If you want to bind the newly created service principal to a specific subscription, or the created credentials do not work with SeBS and you see errors such as "No subscriptions found for X", then you must specify a subscription when creating the service principal. Check your subscription ID on in the Azure portal, and use the CLI option `tools/create_azure_credentials.py --subscription `. > [!WARNING] -> Sometimes there's a delay within Azure platform that causes properties like subscription assignment. 
If you keep seeing the error "No subscription found", then wait for a few minutes before trying agian. +> Sometimes there's a delay within Azure platform that causes properties like subscription assignment to not be propagated immediately across systems. If you keep seeing errors such as "No subscription found", then wait for a few minutes before trying again. > [!WARNING] > When you log in for the first time on a device, Microsoft might require authenticating your login with Multi-Factor Authentication (MFA). In this case, we will return an error such as: "The following tenants require Multi-Factor Authentication (MFA). Use 'az login --tenant TENANT_ID' to explicitly login to a tenant.". Then, you can pass the tenant ID by using the `--tenant ` flag. From b72edb3f7b99036273e905c9543335c21f0cab8c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 01:34:15 +0100 Subject: [PATCH 79/82] [system] Bump submodule --- benchmarks-data | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks-data b/benchmarks-data index 6a17a460..269ac284 160000 --- a/benchmarks-data +++ b/benchmarks-data @@ -1 +1 @@ -Subproject commit 6a17a460f289e166abb47ea6298fb939e80e8beb +Subproject commit 269ac284fa3ae58f2fcb444b3a83e2255028c20f From 611a11e4e12c8cfc4e0cde41d58d38f1da2cfdbe Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 01:52:35 +0100 Subject: [PATCH 80/82] [java] Improve gathering of timestamps --- .../java/org/serverlessbench/Handler.java | 19 +++++++++---------- .../java/org/serverlessbench/Handler.java | 18 ++++++++---------- .../java/org/serverlessbench/Handler.java | 17 ++++++++--------- .../java/org/serverlessbench/Handler.java | 10 ++++++++-- 4 files changed, 33 insertions(+), 31 deletions(-) diff --git a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java index c03c8e72..47514ca5 100644 ---
a/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/aws/java/src/main/java/org/serverlessbench/Handler.java @@ -7,6 +7,7 @@ import java.util.HashMap; import java.util.Map; +import java.time.Instant; public class Handler implements RequestHandler, Map> { @@ -14,17 +15,16 @@ public class Handler implements RequestHandler, Map handleRequest(Map event, Context context) { - long beginMs = System.currentTimeMillis(); - long beginNs = System.nanoTime(); + + Instant beginTs = Instant.now(); Map normalized = normalize(event); Function function = new Function(); Map result = function.handler(normalized); - long endNs = System.nanoTime(); - long endMs = System.currentTimeMillis(); + Instant endTs = Instant.now(); // Format timestamps as "seconds.microseconds" like Python - String beginStr = formatTimestamp(beginMs, beginNs); - String endStr = formatTimestamp(endMs, endNs); + String beginStr = formatTimestamp(beginTs); + String endStr = formatTimestamp(endTs); // Get or create container ID String containerId = ColdStartTracker.getContainerId(); @@ -56,10 +56,9 @@ public Map handleRequest(Map event, Context cont return response; } - private String formatTimestamp(long epochMillis, long nanoTime) { - long seconds = epochMillis / 1000; - // Use nanos for microseconds precision - long microseconds = (nanoTime / 1000) % 1_000_000; + private String formatTimestamp(Instant ts) { + long seconds = ts.getEpochSecond(); + long microseconds = ts.getNano() / 1_000; return String.format("%d.%06d", seconds, microseconds); } diff --git a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java index 208fb179..5a35b491 100644 --- a/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/azure/java/src/main/java/org/serverlessbench/Handler.java @@ -11,6 +11,7 @@ import java.util.HashMap; import 
java.util.Map; import java.util.Optional; +import java.time.Instant; public class Handler { @@ -27,17 +28,15 @@ public HttpResponseMessage handleRequest( final ExecutionContext context ) { - long beginMs = System.currentTimeMillis(); - long beginNs = System.nanoTime(); + Instant beginTs = Instant.now(); Map normalized = normalizeRequest(request); Function function = new Function(); Map result = function.handler(normalized); - long endNs = System.nanoTime(); - long endMs = System.currentTimeMillis(); + Instant endTs = Instant.now(); // Format timestamps as "seconds.microseconds" like Python - String beginStr = formatTimestamp(beginMs, beginNs); - String endStr = formatTimestamp(endMs, endNs); + String beginStr = formatTimestamp(beginTs); + String endStr = formatTimestamp(endTs); // Get or create container ID String containerId = ColdStartTracker.getContainerId(); @@ -66,10 +65,9 @@ public HttpResponseMessage handleRequest( .build(); } - private String formatTimestamp(long epochMillis, long nanoTime) { - long seconds = epochMillis / 1000; - // Use nanos for microseconds precision - long microseconds = (nanoTime / 1000) % 1_000_000; + private String formatTimestamp(Instant ts) { + long seconds = ts.getEpochSecond(); + long microseconds = ts.getNano() / 1_000; return String.format("%d.%06d", seconds, microseconds); } diff --git a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java index adc0d0af..1e96c5e4 100644 --- a/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/gcp/java/src/main/java/org/serverlessbench/Handler.java @@ -11,6 +11,7 @@ import java.util.HashMap; import java.util.Map; import java.util.stream.Collectors; +import java.time.Instant; public class Handler implements HttpFunction { @@ -20,8 +21,7 @@ public class Handler implements HttpFunction { public void service(HttpRequest request, HttpResponse 
response) throws IOException { - long beginMs = System.currentTimeMillis(); - long beginNs = System.nanoTime(); + Instant beginTs = Instant.now(); // Normalize request from GCP HTTP format Map normalized = normalizeRequest(request); @@ -29,12 +29,11 @@ public void service(HttpRequest request, HttpResponse response) Function function = new Function(); Map result = function.handler(normalized); - long endNs = System.nanoTime(); - long endMs = System.currentTimeMillis(); + Instant endTs = Instant.now(); // Format timestamps as "seconds.microseconds" (SeBS standard) - String beginStr = formatTimestamp(beginMs, beginNs); - String endStr = formatTimestamp(endMs, endNs); + String beginStr = formatTimestamp(beginTs); + String endStr = formatTimestamp(endTs); // Get cold start info String containerId = ColdStartTracker.getContainerId(); @@ -60,9 +59,9 @@ public void service(HttpRequest request, HttpResponse response) writer.write(MAPPER.writeValueAsString(body)); } - private String formatTimestamp(long epochMillis, long nanoTime) { - long seconds = epochMillis / 1000; - long microseconds = (nanoTime / 1000) % 1_000_000; + private String formatTimestamp(Instant ts) { + long seconds = ts.getEpochSecond(); + long microseconds = ts.getNano() / 1_000; return String.format("%d.%06d", seconds, microseconds); } diff --git a/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java index 2b7fee57..bf929897 100644 --- a/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java +++ b/benchmarks/wrappers/openwhisk/java/src/main/java/org/serverlessbench/Handler.java @@ -29,8 +29,8 @@ public static JsonObject main(JsonObject args) { String containerId = ColdStartTracker.getContainerId(); // Convert to Unix timestamp in seconds.microseconds - String formattedBegin = String.format("%d.%06d", begin.getEpochSecond(), begin.getNano() / 1000); - String 
formattedEnd = String.format("%d.%06d", end.getEpochSecond(), end.getNano() / 1000); + String formattedBegin = formatTimestamp(begin); + String formattedEnd = formatTimestamp(end); String requestId = System.getenv("__OW_ACTIVATION_ID"); if (requestId == null) { @@ -53,5 +53,11 @@ public static JsonObject main(JsonObject args) { return jsonResult; } + static private String formatTimestamp(Instant ts) { + long seconds = ts.getEpochSecond(); + long microseconds = ts.getNano() / 1_000; + return String.format("%d.%06d", seconds, microseconds); + } + } From 87fc63b06f2d58fbce69cb2df7ec3dd0c43f7ed4 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 01:52:47 +0100 Subject: [PATCH 81/82] [regression] Ensure Azure is logged in --- sebs/regression.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sebs/regression.py b/sebs/regression.py index 27fe0109..7e185ae9 100644 --- a/sebs/regression.py +++ b/sebs/regression.py @@ -678,8 +678,10 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): ) # Initialize Azure CLI if not already done + needs_login = False if not hasattr(AzureTestSequenceJava, "cli"): AzureTestSequenceJava.cli = AzureCLI(self.client.config, self.client.docker_client) + needs_login = True # Create a copy of the config and set architecture and deployment type config_copy = copy.deepcopy(cloud_config) @@ -696,7 +698,9 @@ def get_deployment(self, benchmark_name, architecture, deployment_type): ) # Initialize CLI and setup resources (no login needed - reuses previous session) - deployment_client.system_resources.initialize_cli(cli=AzureTestSequenceJava.cli) + deployment_client.system_resources.initialize_cli( + cli=AzureTestSequenceJava.cli, login=needs_login + ) deployment_client.initialize(resource_prefix="regr") return deployment_client From c071f75d29e88de7c1b82813e7d8145f909649f7 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 Mar 2026 01:53:03 +0100 Subject: [PATCH 82/82] [system] Fix copy 
error --- .mypy.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/.mypy.ini b/.mypy.ini index c1e1e295..4f6335e0 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -45,8 +45,6 @@ ignore_missing_imports = True [mypy-googleapiclient.discovery] ignore_missing_imports = True -[mypy-googleapiclient.errors] -ignore_missing_imports = True [mypy-googleapiclient.errors] ignore_missing_imports = True