diff --git a/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.18 b/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.18 new file mode 100644 index 00000000..aaa3f883 --- /dev/null +++ b/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.18 @@ -0,0 +1,10 @@ +{ + "name": "", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "sharp": "^0.33" + } +} diff --git a/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.20 b/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.20 new file mode 100644 index 00000000..aaa3f883 --- /dev/null +++ b/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.20 @@ -0,0 +1,10 @@ +{ + "name": "", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "sharp": "^0.33" + } +} diff --git a/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.10 b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.10 new file mode 100644 index 00000000..9caa46c8 --- /dev/null +++ b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.10 @@ -0,0 +1 @@ +pillow==10.3.0 diff --git a/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.11 b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.11 new file mode 100644 index 00000000..9caa46c8 --- /dev/null +++ b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.11 @@ -0,0 +1 @@ +pillow==10.3.0 diff --git a/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.12 b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.12 new file mode 100644 index 00000000..9caa46c8 --- /dev/null +++ b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.12 @@ -0,0 +1 @@ +pillow==10.3.0 diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.10 
b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.10 new file mode 100644 index 00000000..ab734881 --- /dev/null +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.10 @@ -0,0 +1,5 @@ +pillow==10.3.0 +https://download.pytorch.org/whl/cpu/torch-1.11.0%2Bcpu-cp310-cp310-linux_x86_64.whl +torchvision==0.12 +# prevent installing numpy 2.0 +numpy==1.22.0 diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.11 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.11 new file mode 100644 index 00000000..3288171f --- /dev/null +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.11 @@ -0,0 +1,5 @@ +pillow==10.3.0 +https://download.pytorch.org/whl/cpu/torch-2.0.0%2Bcpu-cp311-cp311-linux_x86_64.whl +torchvision==0.15.1 +# prevent installing numpy 2.0 +numpy==1.24.0 diff --git a/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.10 b/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.10 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.10 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.11 b/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.11 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.11 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.12 b/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.12 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/501.graph-pagerank/python/requirements.txt.3.12 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.10 
b/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.10 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.10 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.11 b/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.11 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.11 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.12 b/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.12 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/502.graph-mst/python/requirements.txt.3.12 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.10 b/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.10 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.10 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.11 b/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.11 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.11 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.12 b/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.12 new file mode 100644 index 00000000..e291b7b3 --- /dev/null +++ b/benchmarks/500.scientific/503.graph-bfs/python/requirements.txt.3.12 @@ -0,0 +1 @@ +igraph==0.11.4 diff --git a/config/systems.json b/config/systems.json index bb21dcd9..bf095d3f 100644 --- a/config/systems.json +++ b/config/systems.json @@ -18,7 +18,10 @@ "python": { 
"base_images": { "3.7": "python:3.7-slim", - "3.8": "python:3.8-slim" + "3.8": "python:3.8-slim", + "3.9": "python:3.9-slim", + "3.10": "python:3.10-slim", + "3.11": "python:3.11-slim" }, "images": [ "run", @@ -34,8 +37,10 @@ }, "nodejs": { "base_images": { - "12": "node:12-slim", - "14": "node:14-slim" + "14": "node:14-slim", + "16": "node:16-slim", + "18": "node:18-slim", + "20": "node:20-slim" }, "images": [ "run", @@ -55,15 +60,11 @@ "languages": { "python": { "base_images": { + "3.11": "amazon/aws-lambda-python:3.11", + "3.10": "amazon/aws-lambda-python:3.10", "3.9": "amazon/aws-lambda-python:3.9", - "3.8": "amazon/aws-lambda-python:3.8", - "3.7": "amazon/aws-lambda-python:3.7" + "3.8": "amazon/aws-lambda-python:3.8" }, - "versions": [ - "3.7", - "3.8", - "3.9" - ], "images": [ "build" ], @@ -77,12 +78,8 @@ }, "nodejs": { "base_images": { - "14": "amazon/aws-lambda-nodejs:14", "16": "amazon/aws-lambda-nodejs:16" }, - "versions": [ - "14", "16" - ], "images": [ "build" ], @@ -104,7 +101,9 @@ "base_images": { "3.7": "mcr.microsoft.com/azure-functions/python:3.0-python3.7", "3.8": "mcr.microsoft.com/azure-functions/python:3.0-python3.8", - "3.9": "mcr.microsoft.com/azure-functions/python:3.0-python3.9" + "3.9": "mcr.microsoft.com/azure-functions/python:3.0-python3.9", + "3.10": "mcr.microsoft.com/azure-functions/python:4-python3.10", + "3.11": "mcr.microsoft.com/azure-functions/python:4-python3.11" }, "images": [ "build" @@ -122,7 +121,9 @@ }, "nodejs": { "base_images": { - "14": "mcr.microsoft.com/azure-functions/node:3.0-node14" + "16": "mcr.microsoft.com/azure-functions/node:4-node16", + "18": "mcr.microsoft.com/azure-functions/node:4-node18", + "20": "mcr.microsoft.com/azure-functions/node:4-node20" }, "images": [ "build" @@ -152,7 +153,10 @@ "base_images": { "3.7": "ubuntu:22.04", "3.8": "ubuntu:22.04", - "3.9": "ubuntu:22.04" + "3.9": "ubuntu:22.04", + "3.10": "ubuntu:22.04", + "3.11": "ubuntu:22.04", + "3.12": "ubuntu:22.04" }, "images": [ "build" @@ 
-170,10 +174,12 @@ }, "nodejs": { "base_images": { - "10": "gcr.io/google-appengine/nodejs", - "12": "gcr.io/google-appengine/nodejs", - "14": "gcr.io/google-appengine/nodejs", - "16": "gcr.io/google-appengine/nodejs" + "10": "ubuntu:18.04", + "12": "ubuntu:18.04", + "14": "ubuntu:18.04", + "16": "ubuntu:18.04", + "18": "ubuntu:22.04", + "20": "ubuntu:22.04" }, "images": [ "build" @@ -197,7 +203,9 @@ "python": { "base_images": { "3.7": "openwhisk/action-python-v3.7", - "3.9": "openwhisk/action-python-v3.9" + "3.9": "openwhisk/action-python-v3.9", + "3.10": "openwhisk/action-python-v3.10", + "3.11": "openwhisk/action-python-v3.11" }, "images": [ "function" @@ -217,7 +225,10 @@ "nodejs": { "base_images": { "10": "openwhisk/action-nodejs-v10", - "12": "openwhisk/action-nodejs-v12" + "12": "openwhisk/action-nodejs-v12", + "14": "openwhisk/action-nodejs-v14", + "18": "openwhisk/action-nodejs-v18", + "20": "openwhisk/action-nodejs-v20" }, "images": [ "function" diff --git a/dockerfiles/gcp/nodejs/Dockerfile.build b/dockerfiles/gcp/nodejs/Dockerfile.build index fbc4e0bd..477f236b 100755 --- a/dockerfiles/gcp/nodejs/Dockerfile.build +++ b/dockerfiles/gcp/nodejs/Dockerfile.build @@ -1,10 +1,12 @@ ARG BASE_IMAGE FROM ${BASE_IMAGE} ARG VERSION -ENV HOME=/home/${USER} +ENV NVM_DIR=/nvm -RUN install_node --ignore-verification-failure v${VERSION} -RUN apt-get update && apt-get install -y gosu +#RUN install_node --ignore-verification-failure v${VERSION} +RUN apt-get update && apt-get install -y gosu wget +RUN mkdir -p ${NVM_DIR} && wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash +RUN . 
${NVM_DIR}/nvm.sh && nvm install ${VERSION} && nvm alias default ${VERSION} && nvm use default RUN mkdir -p /sebs/ COPY dockerfiles/nodejs_installer.sh /sebs/installer.sh @@ -16,3 +18,4 @@ ENV PATH=/usr/sbin:$PATH ENV SCRIPT_FILE=/mnt/function/package.sh CMD /bin/bash /sebs/installer.sh ENTRYPOINT ["/sebs/entrypoint.sh"] + diff --git a/dockerfiles/local/nodejs/server.js b/dockerfiles/local/nodejs/server.js index b40696d7..c98b3fa7 100644 --- a/dockerfiles/local/nodejs/server.js +++ b/dockerfiles/local/nodejs/server.js @@ -9,6 +9,12 @@ const { v4: uuidv4 } = require('uuid'); var app = express(); app.use(express.json()); +app.post('/alive', function (req, res) { + res.send(JSON.stringify({ + status: "ok" + })); +}); + app.post('/', function (req, res) { let begin = Date.now(); diff --git a/dockerfiles/local/python/Dockerfile.build b/dockerfiles/local/python/Dockerfile.build index 9d6402af..5892c650 100755 --- a/dockerfiles/local/python/Dockerfile.build +++ b/dockerfiles/local/python/Dockerfile.build @@ -4,7 +4,7 @@ ARG VERSION ENV PYTHON_VERSION=${VERSION} RUN apt-get update\ - && apt-get install -y --no-install-recommends gcc build-essential python-dev libxml2 libxml2-dev zlib1g-dev gosu\ + && apt-get install -y --no-install-recommends gcc build-essential python3-dev libxml2 libxml2-dev zlib1g-dev gosu\ && apt-get purge -y --auto-remove RUN mkdir -p /sebs/ diff --git a/dockerfiles/local/python/server.py b/dockerfiles/local/python/server.py index e86327dc..4ed1314f 100644 --- a/dockerfiles/local/python/server.py +++ b/dockerfiles/local/python/server.py @@ -8,8 +8,14 @@ CODE_LOCATION='/function' +@route('/alive', method='GET') +def alive(): + return { + "result": "ok" + } + @route('/', method='POST') -def flush_log(): +def process_request(): begin = datetime.datetime.now() from function import function end = datetime.datetime.now() diff --git a/dockerfiles/nodejs_installer.sh b/dockerfiles/nodejs_installer.sh index 7f0710f3..d6fd6a5b 100644 ---
a/dockerfiles/nodejs_installer.sh +++ b/dockerfiles/nodejs_installer.sh @@ -1,3 +1,6 @@ #!/bin/bash +if [ -f /nvm/nvm.sh ]; then + . /nvm/nvm.sh +fi cd /mnt/function && npm install && rm -rf package-lock.json diff --git a/docs/benchmarks.md b/docs/benchmarks.md index 2d913607..85a8f969 100644 --- a/docs/benchmarks.md +++ b/docs/benchmarks.md @@ -26,7 +26,7 @@ Below, we discuss the most important implementation details of each benchmark. F > Benchmark `411.image-recognition` does not work on AWS with Python 3.9 due to excessive code size. While it is possible to ship the benchmark by zipping `torchvision` and `numpy` (see `benchmarks/400.inference/411.image-recognition/python/package.sh`), this significantly affects cold startup. On the lowest supported memory configuration of 512 MB, the cold startup can reach 30 seconds, making HTTP trigger unusable due to 30 second timeout of API gateway. In future, we might support Docker-based deployment on AWS that are not affected by code size limitations. > [!WARNING] -> Benchmark `411.image-recognition` does not work on GCP with Python 3.8 and 3.9 due to excessive code size. To the best of our knowledge, there is no way of circumventing that limit, as Google Cloud offers neither layers nor custom Docker images. +> Benchmark `411.image-recognition` does not work on GCP with Python 3.8+ due to excessive code size. To the best of our knowledge, there is no way of circumventing that limit, as Google Cloud offers neither layers nor custom Docker images.
## Webapps diff --git a/requirements.txt b/requirements.txt index cd53509b..2717467c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ testtools>=2.4.0 docker>=4.2.0 tzlocal>=2.1 +requests #linting flake8 black==22.8.0 diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index a9a54c1e..78e45963 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -4,6 +4,7 @@ import os import shutil import time +import uuid from typing import cast, Dict, List, Optional, Set, Tuple, Type # noqa import docker @@ -202,7 +203,7 @@ def package_code( "version": "2.0", "extensionBundle": { "id": "Microsoft.Azure.Functions.ExtensionBundle", - "version": "[1.*, 2.0.0)", + "version": "[4.0.0, 5.0.0)", }, } json.dump(default_host_json, open(os.path.join(directory, "host.json"), "w"), indent=2) @@ -215,6 +216,7 @@ def publish_function( self, function: Function, code_package: Benchmark, + container_dest: str, repeat_on_failure: bool = False, ) -> str: success = False @@ -223,7 +225,7 @@ def publish_function( while not success: try: ret = self.cli_instance.execute( - "bash -c 'cd /mnt/function " + f"bash -c 'cd {container_dest} " "&& func azure functionapp publish {} --{} --no-build'".format( function.name, self.AZURE_RUNTIMES[code_package.language_name] ) @@ -286,8 +288,8 @@ def publish_function( def update_function(self, function: Function, code_package: Benchmark): # Mount code package in Docker instance - self._mount_function_code(code_package) - url = self.publish_function(function, code_package, True) + container_dest = self._mount_function_code(code_package) + url = self.publish_function(function, code_package, container_dest, True) trigger = HTTPTrigger(url, self.config.resources.data_storage_account(self.cli_instance)) trigger.logging_handlers = self.logging_handlers @@ -299,8 +301,10 @@ def update_function_configuration(self, function: Function, code_package: Benchm "Updating function's memory and timeout configuration is not supported." 
) - def _mount_function_code(self, code_package: Benchmark): - self.cli_instance.upload_package(code_package.code_location, "/mnt/function/") + def _mount_function_code(self, code_package: Benchmark) -> str: + dest = os.path.join("/mnt", "function", uuid.uuid4().hex) + self.cli_instance.upload_package(code_package.code_location, dest) + return dest def default_function_name(self, code_package: Benchmark) -> str: """ @@ -366,6 +370,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> AzureFunct " --os-type Linux --consumption-plan-location {region} " " --runtime {runtime} --runtime-version {runtime_version} " " --name {func_name} --storage-account {storage_account}" + " --functions-version 4 " ).format(**config) ) self.logging.info("Azure: Created function app {}".format(func_name)) diff --git a/sebs/azure/cli.py b/sebs/azure/cli.py index 96ccf65e..b875ee02 100644 --- a/sebs/azure/cli.py +++ b/sebs/azure/cli.py @@ -85,10 +85,24 @@ def login(self, appId: str, tenant: str, password: str) -> bytes: return result def upload_package(self, directory: str, dest: str): + + """ + This is not an efficient and memory-intensive implementation. + So far, we didn't have very large functions that require many gigabytes. + + Since docker-py does not support a straightforward copy, and we can't + put_archive in chunks. + + If we end up having problems because of the archive size, there are two + potential solutions: + (1) manually call docker cp and decompress + (2) commit the docker container and restart with a new mount volume. 
+ """ handle = io.BytesIO() with tarfile.open(fileobj=handle, mode="w:gz") as tar: for f in os.listdir(directory): tar.add(os.path.join(directory, f), arcname=f) + # shutil.make_archive(, 'zip', directory) # move to the beginning of memory before writing handle.seek(0) self.execute("mkdir -p {}".format(dest)) diff --git a/sebs/local/function.py b/sebs/local/function.py index a5eb2406..1db9d4cb 100644 --- a/sebs/local/function.py +++ b/sebs/local/function.py @@ -67,6 +67,10 @@ def __init__( self._measurement_pid = measurement_pid + @property + def url(self) -> str: + return self._url + @property def memory_measurement_pid(self) -> Optional[int]: return self._measurement_pid diff --git a/sebs/local/local.py b/sebs/local/local.py index fb9bd1e1..5a4eb18f 100644 --- a/sebs/local/local.py +++ b/sebs/local/local.py @@ -1,5 +1,7 @@ import os +import requests import shutil +import time from typing import cast, Dict, List, Optional, Type, Tuple # noqa import subprocess @@ -221,6 +223,24 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LocalFunc function_cfg, pid, ) + + # Wait until server starts + max_attempts = 10 + attempts = 0 + while attempts < max_attempts: + try: + requests.get(f"http://{func.url}/alive") + break + except requests.exceptions.ConnectionError: + time.sleep(0.25) + attempts += 1 + + if attempts == max_attempts: + raise RuntimeError( + f"Couldn't start {func_name} function at container " + f"{container.id} , running on {func._url}" + ) + self.logging.info( f"Started {func_name} function at container {container.id} , running on {func._url}" ) diff --git a/sebs/regression.py b/sebs/regression.py index 3084bc88..71012284 100644 --- a/sebs/regression.py +++ b/sebs/regression.py @@ -126,6 +126,7 @@ def get_deployment(self, benchmark_name): self.client.output_dir, f"regression_{deployment_name}_{benchmark_name}.log" ), ) + with AWSTestSequencePython.lock: deployment_client.initialize(resource_prefix="regression") return 
deployment_client @@ -229,7 +230,8 @@ def get_deployment(self, benchmark_name): cloud_config, logging_filename=f"regression_{deployment_name}_{benchmark_name}.log", ) - deployment_client.initialize() + with GCPTestSequencePython.lock: + deployment_client.initialize(resource_prefix="regression") return deployment_client @@ -247,7 +249,8 @@ def get_deployment(self, benchmark_name): cloud_config, logging_filename=f"regression_{deployment_name}_{benchmark_name}.log", ) - deployment_client.initialize() + with GCPTestSequenceNodejs.lock: + deployment_client.initialize(resource_prefix="regression") return deployment_client @@ -322,7 +325,12 @@ def filter_out_benchmarks( benchmark: str, deployment_name: str, language: str, language_version: str ) -> bool: - if deployment_name == "aws" and language == "python" and language_version == "3.9": + if (deployment_name == "aws" and language == "python" + and language_version in ["3.9", "3.10", "3.11"]): + return "411.image-recognition" not in benchmark + + if (deployment_name == "gcp" and language == "python" + and language_version in ["3.8", "3.9", "3.10", "3.11", "3.12"]): return "411.image-recognition" not in benchmark return True