From b549469b22af896f8b151690092b9ff6f7384094 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 7 Nov 2025 16:20:24 +0200 Subject: [PATCH 001/170] ZTS initial support for FrankenPHP --- .devcontainer/centos_franken_php/Dockerfile | 99 +++++++++++++++ .../centos_franken_php/devcontainer.json | 34 ++++++ .../centos_franken_php_arm/Dockerfile | 114 ++++++++++++++++++ .../centos_franken_php_arm/devcontainer.json | 32 +++++ lib/php-extension/Action.cpp | 14 ++- lib/php-extension/Aikido.cpp | 73 +++++++++-- lib/php-extension/Cache.cpp | 3 - lib/php-extension/Environment.cpp | 18 ++- lib/php-extension/GoWrappers.cpp | 58 ++++----- lib/php-extension/Handle.cpp | 16 +-- lib/php-extension/HandleFileCompilation.cpp | 18 +-- lib/php-extension/HandlePathAccess.cpp | 10 +- lib/php-extension/HandleQueries.cpp | 24 ++-- lib/php-extension/HandleRateLimitGroup.cpp | 6 +- lib/php-extension/HandleSetToken.cpp | 2 +- lib/php-extension/HandleShellExecution.cpp | 2 +- .../HandleShouldBlockRequest.cpp | 32 ++--- lib/php-extension/HandleUrls.cpp | 36 +++--- lib/php-extension/HandleUsers.cpp | 8 +- lib/php-extension/HookAst.cpp | 43 +++---- lib/php-extension/Log.cpp | 2 +- lib/php-extension/Packages.cpp | 2 +- lib/php-extension/PhpLifecycle.cpp | 16 ++- lib/php-extension/RequestProcessor.cpp | 27 +++-- lib/php-extension/Server.cpp | 6 +- lib/php-extension/Stats.cpp | 22 ++-- lib/php-extension/Utils.cpp | 7 ++ lib/php-extension/include/Action.h | 2 - lib/php-extension/include/Cache.h | 3 - lib/php-extension/include/HookAst.h | 7 -- lib/php-extension/include/Includes.h | 8 +- lib/php-extension/include/PhpLifecycle.h | 2 - lib/php-extension/include/RequestProcessor.h | 2 - lib/php-extension/include/Server.h | 2 - lib/php-extension/include/Stats.h | 1 - lib/php-extension/include/Utils.h | 2 + lib/php-extension/include/php_aikido.h | 31 +++++ package/rpm/aikido.spec | 79 +++++++++++- tools/server_tests/php_built_in/main.py | 2 +- 39 files changed, 643 insertions(+), 222 deletions(-) create mode 100644 .devcontainer/centos_franken_php/Dockerfile create mode 100644 .devcontainer/centos_franken_php/devcontainer.json create mode 100644 .devcontainer/centos_franken_php_arm/Dockerfile create mode 100644 .devcontainer/centos_franken_php_arm/devcontainer.json diff --git a/.devcontainer/centos_franken_php/Dockerfile b/.devcontainer/centos_franken_php/Dockerfile new file mode 100644 index 000000000..afdc60b5a --- /dev/null +++ b/.devcontainer/centos_franken_php/Dockerfile @@ -0,0 +1,99 @@ +FROM --platform=linux/amd64 centos:8 + +ARG PHP_VERSION=8.4 +ARG PHP_FULL_VERSION=8.4.14 +ARG FRANKENPHP_VERSION=1.9.1 + +WORKDIR /etc/yum.repos.d/ +RUN sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* +RUN sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* +RUN yum update -y +RUN yum install -y yum-utils +RUN yum install -y https://rpms.remirepo.net/enterprise/remi-release-8.4.rpm +RUN yum install -y epel-release +RUN dnf config-manager --set-enabled powertools || dnf config-manager --set-enabled PowerTools || true +RUN yum install -y httpd +RUN yum install -y cpio +RUN yum install -y unzip +RUN yum install -y nano +RUN yum install -y lsof +RUN yum install -y jq +RUN yum install -y libcurl-devel +RUN curl -O https://dl.google.com/go/go1.23.3.linux-amd64.tar.gz +RUN tar -C /usr/local -xzf go1.23.3.linux-amd64.tar.gz +ENV PATH="/usr/local/go/bin:${PATH}" +RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@latest +RUN go install 
google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest +ENV PROTOC_ZIP=protoc-28.3-linux-x86_64.zip +RUN curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v28.3/$PROTOC_ZIP +RUN unzip -o $PROTOC_ZIP -d /usr/local bin/protoc +RUN unzip -o $PROTOC_ZIP -d /usr/local include/* +RUN rm -f $PROTOC_ZIP +ENV PATH="$HOME/go/bin:${PATH}" +RUN yum install -y rpmdevtools +RUN yum install -y git +RUN yum install -y nginx +RUN yum install -y sudo +RUN yum install -y gcc gcc-c++ make +RUN dnf -y module enable python39 +RUN yum install -y python39 python39-devel +RUN ln -sf /usr/bin/python3.9 /usr/bin/python3 && ln -sf /usr/bin/python3.9 /usr/bin/python +RUN pip3 install psutil flask requests --quiet --no-input + +# Checkout PHP sources at the requested full version tag/branch (e.g., PHP-8.4.14) +RUN git clone --depth 1 --branch PHP-${PHP_FULL_VERSION} https://github.com/php/php-src.git /usr/local/src/php-src + +## Build and install a recent re2c required by PHP build system +ENV RE2C_VERSION=3.1 +RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ + && mkdir -p /tmp/re2c-src \ + && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ + && cd /tmp/re2c-src \ + && ./configure \ + && make -j"$(nproc)" \ + && make install + +RUN yum install -y \ + autoconf automake libtool bison pkgconfig \ + libxml2-devel \ + oniguruma-devel \ + libicu-devel \ + mariadb-devel \ + openssl-devel \ + zlib-devel \ + libzip-devel \ + sqlite-devel \ + && cd /usr/local/src/php-src \ + && sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac \ + && ./buildconf --force \ + && CFLAGS="$CFLAGS -fPIE -fPIC" LDFLAGS="$LDFLAGS -pie" ./configure \ + --enable-embed \ + --enable-zts \ + --enable-pdo \ + --disable-zend-signals \ + --enable-zend-max-execution-timers \ + --with-extra-version="" \ + --with-config-file-scan-dir=/etc/php.d \ + --enable-mbstring \ + --enable-pcntl \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ + --with-sqlite3 \ + --with-pdo-sqlite \ + && make -j"$(nproc)" \ + && make install + +RUN mkdir -p /etc/php.d + +RUN ln -sf /usr/local/bin/php /usr/bin/php + +# Install FrankenPHP from RPM +RUN FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-${FRANKENPHP_VERSION}-1.x86_64.rpm" \ + && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ + && yum install -y /tmp/frankenphp.rpm \ + && rm -f /tmp/frankenphp.rpm + + diff --git a/.devcontainer/centos_franken_php/devcontainer.json b/.devcontainer/centos_franken_php/devcontainer.json new file mode 100644 index 000000000..8b7233269 --- /dev/null +++ b/.devcontainer/centos_franken_php/devcontainer.json @@ -0,0 +1,34 @@ +{ + "name": "Centos FrankenPHP Dev Container", + "runArgs": [ + "--privileged" + ], + "mounts": [ + "source=${localWorkspaceFolder}/.devcontainer/shared,target=/shared,type=bind" + ], + "build": { + "platform": "linux/amd64", + "dockerfile": "Dockerfile", + "args": { + "PHP_VERSION": "8.4", + "PHP_FULL_VERSION": "8.4.14", + "FRANKENPHP_VERSION": "1.9.1" + } + }, + "remoteUser": "root", + "customizations": { + "vscode": { + "extensions": [ + "golang.go", + "github.vscode-github-actions", + "ms-vscode.cpptools-extension-pack", + "ms-vscode.cpptools", + "ms-vscode.cpptools-themes", + "austin.code-gnu-global", + "ms-vscode.makefile-tools", + "ms-python.vscode-pylance" + ] + } + } +} + diff --git a/.devcontainer/centos_franken_php_arm/Dockerfile 
b/.devcontainer/centos_franken_php_arm/Dockerfile new file mode 100644 index 000000000..97c99501d --- /dev/null +++ b/.devcontainer/centos_franken_php_arm/Dockerfile @@ -0,0 +1,114 @@ +FROM --platform=linux/arm64 centos:8 + +ARG PHP_VERSION=8.4 +ARG PHP_FULL_VERSION=8.4.14 +ARG FRANKENPHP_VERSION=1.9.1 + +WORKDIR /etc/yum.repos.d/ +RUN sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* \ + && sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* \ + && yum update -y \ + && yum install -y yum-utils \ + && yum install -y https://rpms.remirepo.net/enterprise/remi-release-8.4.rpm \ + && yum install -y epel-release \ + && dnf config-manager --set-enabled powertools || dnf config-manager --set-enabled PowerTools || true \ + && yum install -y httpd cpio unzip nano lsof jq libcurl-devel rpmdevtools git nginx sudo gcc gcc-c++ make \ + && yum clean all && rm -rf /var/cache/yum + +# Go toolchain (arm64) and protoc (aarch_64) similar to centos_arm +RUN curl -O https://dl.google.com/go/go1.23.3.linux-arm64.tar.gz \ + && tar -C /usr/local -xzf go1.23.3.linux-arm64.tar.gz +ENV PATH="/usr/local/go/bin:${PATH}" +RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@latest \ + && go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest +ENV PROTOC_ZIP=protoc-30.2-linux-aarch_64.zip +RUN curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v30.2/$PROTOC_ZIP \ + && unzip -o $PROTOC_ZIP -d /usr/local bin/protoc \ + && unzip -o $PROTOC_ZIP -d /usr/local include/* \ + && rm -f $PROTOC_ZIP +ENV PATH="$HOME/go/bin:${PATH}" + +# Python 3.9 for PHP build helpers +RUN dnf -y module enable python39 \ + && yum install -y python39 python39-devel \ + && ln -sf /usr/bin/python3.9 /usr/bin/python3 && ln -sf /usr/bin/python3.9 /usr/bin/python + +RUN pip3 install psutil flask requests --quiet --no-input + +# Fetch PHP sources +RUN git clone --depth 1 --branch PHP-${PHP_FULL_VERSION} https://github.com/php/php-src.git /usr/local/src/php-src + +# Build re2c from source (arm64) +ENV RE2C_VERSION=3.1 +RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ + && mkdir -p /tmp/re2c-src \ + && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ + && cd /tmp/re2c-src \ + && ./configure \ + && make -j"$(nproc)" \ + && make install \ + && rm -rf /tmp/re2c.tar.xz /tmp/re2c-src + +# Build latest GDB from release tarball (more stable than git) +ENV GDB_VERSION=14.2 +RUN yum install -y texinfo mpfr-devel gmp-devel ncurses-devel readline-devel zlib-devel expat-devel \ + libmpc-devel gettext-devel \ + && curl -fsSL -o /tmp/gdb-${GDB_VERSION}.tar.xz https://ftp.gnu.org/gnu/gdb/gdb-${GDB_VERSION}.tar.xz \ + && mkdir -p /tmp/gdb-src \ + && tar -xJf /tmp/gdb-${GDB_VERSION}.tar.xz -C /tmp/gdb-src --strip-components=1 \ + && cd /tmp/gdb-src \ + && ./configure --prefix=/usr/local \ + --with-python=/usr/bin/python3.9 \ + --enable-tui \ + --with-readline \ + && make -j"$(nproc)" \ + && make install \ + && echo 'set auto-load safe-path /' > /root/.gdbinit + +RUN yum install -y \ + autoconf automake libtool bison pkgconfig \ + libxml2-devel \ + oniguruma-devel \ + libicu-devel \ + mariadb-devel \ + openssl-devel \ + libzip-devel \ + sqlite-devel \ + && cd /usr/local/src/php-src \ + && sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac \ + && ./buildconf --force \ + && CFLAGS="$CFLAGS -fPIE -fPIC" LDFLAGS="$LDFLAGS -pie" ./configure \ + --enable-embed \ + 
--enable-zts \ + --enable-pdo \ + --disable-zend-signals \ + --enable-zend-max-execution-timers \ + --with-extra-version="" \ + --with-config-file-scan-dir=/etc/php.d \ + --enable-mbstring \ + --enable-pcntl \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ + --with-sqlite3 \ + --with-pdo-sqlite \ + && make -j"$(nproc)" \ + && make install + +RUN mkdir -p /etc/php.d + +RUN ln -sf /usr/local/bin/php /usr/bin/php + +# Try to install FrankenPHP for arm64 (optional) +RUN FRANKENPHP_ARCH=aarch64 \ + && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-${FRANKENPHP_VERSION}-1.${FRANKENPHP_ARCH}.rpm" \ + && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ + && yum install -y /tmp/frankenphp.rpm || true \ + && rm -f /tmp/frankenphp.rpm || true + +# Final PATH to include phpize/php-config if installed +ENV PATH="/usr/local/bin:/usr/local/sbin:${PATH}" + + diff --git a/.devcontainer/centos_franken_php_arm/devcontainer.json b/.devcontainer/centos_franken_php_arm/devcontainer.json new file mode 100644 index 000000000..93d7c634c --- /dev/null +++ b/.devcontainer/centos_franken_php_arm/devcontainer.json @@ -0,0 +1,32 @@ +{ + "name": "Centos FrankenPHP (arm64)", + "runArgs": [], + "mounts": [ + "source=${localWorkspaceFolder}/.devcontainer/shared,target=/shared,type=bind" + ], + "build": { + "platform": "linux/arm64", + "dockerfile": "Dockerfile", + "args": { + "PHP_VERSION": "8.4", + "PHP_FULL_VERSION": "8.4.14", + "FRANKENPHP_VERSION": "1.9.1" + } + }, + "remoteUser": "root", + "customizations": { + "vscode": { + "extensions": [ + "golang.go", + "github.vscode-github-actions", + "ms-vscode.cpptools-extension-pack", + "ms-vscode.cpptools", + "ms-vscode.cpptools-themes", + "austin.code-gnu-global", + "ms-vscode.makefile-tools" + ] + } + } +} + + diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index fe238145c..e84d0c070 100644 --- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -1,12 +1,12 @@ #include "Includes.h" -Action action; +#define CONTENT_TYPE_HEADER "Content-Type: text/plain" ACTION_STATUS Action::executeThrow(json &event) { int _code = event["code"].get(); std::string _message = event["message"].get(); + SG(sapi_headers).http_response_code = _code; zend_throw_exception(zend_exception_get_default(), _message.c_str(), _code); - CallPhpFunctionWithOneParam("http_response_code", _code); return BLOCK; } @@ -16,8 +16,14 @@ ACTION_STATUS Action::executeExit(json &event) { // CallPhpFunction("ob_clean"); CallPhpFunction("header_remove"); - CallPhpFunctionWithOneParam("http_response_code", _response_code); - CallPhpFunctionWithOneParam("header", "Content-Type: text/plain"); + SG(sapi_headers).http_response_code = _response_code; + + sapi_header_line ctr = {0}; + ctr.line = CONTENT_TYPE_HEADER; + ctr.line_len = sizeof(CONTENT_TYPE_HEADER) - 1; + ctr.response_code = 0; + sapi_header_op(SAPI_HEADER_REPLACE, &ctr); + CallPhpEcho(_message); CallPhpExit(); return EXIT; diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 40d26dce8..e05907803 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -16,14 +16,14 @@ PHP_MINIT_FUNCTION(aikido) { return SUCCESS; } - phpLifecycle.HookAll(); + AIKIDO_GLOBAL(phpLifecycle).HookAll(); /* If SAPI name is "cli" run in "simple" mode */ if (AIKIDO_GLOBAL(sapi_name) == "cli") { AIKIDO_LOG_INFO("MINIT finished earlier because we run in CLI mode!\n"); return SUCCESS; } - 
phpLifecycle.ModuleInit(); + AIKIDO_GLOBAL(phpLifecycle).ModuleInit(); AIKIDO_LOG_INFO("MINIT finished!\n"); return SUCCESS; } @@ -41,7 +41,7 @@ PHP_MSHUTDOWN_FUNCTION(aikido) { /* If SAPI name is "cli" run in "simple" mode */ if (AIKIDO_GLOBAL(sapi_name) == "cli") { AIKIDO_LOG_INFO("MSHUTDOWN finished earlier because we run in CLI mode!\n"); - phpLifecycle.UnhookAll(); + AIKIDO_GLOBAL(phpLifecycle).UnhookAll(); return SUCCESS; } @@ -52,7 +52,7 @@ PHP_MSHUTDOWN_FUNCTION(aikido) { The same does not apply for CLI mode, where the MSHUTDOWN is called only once. */ - phpLifecycle.ModuleShutdown(); + AIKIDO_GLOBAL(phpLifecycle).ModuleShutdown(); AIKIDO_LOG_DEBUG("MSHUTDOWN finished!\n"); return SUCCESS; } @@ -66,7 +66,7 @@ PHP_RINIT_FUNCTION(aikido) { return SUCCESS; } - phpLifecycle.RequestInit(); + AIKIDO_GLOBAL(phpLifecycle).RequestInit(); AIKIDO_LOG_DEBUG("RINIT finished!\n"); return SUCCESS; } @@ -82,7 +82,7 @@ PHP_RSHUTDOWN_FUNCTION(aikido) { } DestroyAstToClean(); - phpLifecycle.RequestShutdown(); + AIKIDO_GLOBAL(phpLifecycle).RequestShutdown(); AIKIDO_LOG_DEBUG("RSHUTDOWN finished!\n"); return SUCCESS; } @@ -102,6 +102,63 @@ static const zend_function_entry ext_functions[] = { ZEND_FE_END }; +PHP_GINIT_FUNCTION(aikido) { + aikido_globals->environment_loaded = false; + aikido_globals->log_level = 0; + aikido_globals->blocking = false; + aikido_globals->disable = false; + aikido_globals->disk_logs = false; + aikido_globals->collect_api_schema = false; + aikido_globals->trust_proxy = false; + aikido_globals->localhost_allowed_by_default = false; + aikido_globals->report_stats_interval_to_agent = 0; + aikido_globals->currentRequestStart = std::chrono::high_resolution_clock::time_point{}; + aikido_globals->totalOverheadForCurrentRequest = 0; + aikido_globals->laravelEnvLoaded = false; + aikido_globals->checkedAutoBlock = false; + aikido_globals->checkedShouldBlockRequest = false; + aikido_globals->global_ast_to_clean = nullptr; + aikido_globals->original_ast_process = nullptr; + new (&aikido_globals->log_level_str) std::string(); + new (&aikido_globals->sapi_name) std::string(); + new (&aikido_globals->token) std::string(); + new (&aikido_globals->endpoint) std::string(); + new (&aikido_globals->config_endpoint) std::string(); + new (&aikido_globals->logger) Log(); + new (&aikido_globals->agent) Agent(); + new (&aikido_globals->server) Server(); + new (&aikido_globals->requestProcessor) RequestProcessor(); + new (&aikido_globals->action) Action(); + new (&aikido_globals->requestCache) RequestCache(); + new (&aikido_globals->eventCache) EventCache(); + new (&aikido_globals->phpLifecycle) PhpLifecycle(); + new (&aikido_globals->stats) std::unordered_map(); + new (&aikido_globals->laravelEnv) std::unordered_map(); +} + +PHP_GSHUTDOWN_FUNCTION(aikido) { + if (aikido_globals->global_ast_to_clean) { + zend_hash_destroy(aikido_globals->global_ast_to_clean); + FREE_HASHTABLE(aikido_globals->global_ast_to_clean); + aikido_globals->global_ast_to_clean = nullptr; + } + aikido_globals->laravelEnv.~unordered_map(); + aikido_globals->stats.~unordered_map(); + aikido_globals->phpLifecycle.~PhpLifecycle(); + aikido_globals->eventCache.~EventCache(); + aikido_globals->requestCache.~RequestCache(); + aikido_globals->action.~Action(); + aikido_globals->requestProcessor.~RequestProcessor(); + aikido_globals->server.~Server(); + aikido_globals->logger.~Log(); + aikido_globals->agent.~Agent(); + aikido_globals->config_endpoint.~string(); + aikido_globals->endpoint.~string(); + aikido_globals->token.~string(); + 
aikido_globals->sapi_name.~string(); + aikido_globals->log_level_str.~string(); +} + zend_module_entry aikido_module_entry = { STANDARD_MODULE_HEADER, "aikido", /* Extension name */ @@ -113,8 +170,8 @@ zend_module_entry aikido_module_entry = { PHP_MINFO(aikido), /* PHP_MINFO - Module info */ PHP_AIKIDO_VERSION, /* Version */ PHP_MODULE_GLOBALS(aikido), /* Module globals */ - NULL, /* PHP_GINIT – Globals initialization */ - NULL, /* PHP_GSHUTDOWN – Globals shutdown */ + PHP_GINIT(aikido), /* PHP_GINIT – Globals initialization */ + PHP_GSHUTDOWN(aikido), /* PHP_GSHUTDOWN – Globals shutdown */ NULL, STANDARD_MODULE_PROPERTIES_EX, }; diff --git a/lib/php-extension/Cache.cpp b/lib/php-extension/Cache.cpp index 323d233bf..60c569d6d 100644 --- a/lib/php-extension/Cache.cpp +++ b/lib/php-extension/Cache.cpp @@ -1,8 +1,5 @@ #include "Includes.h" -RequestCache requestCache; -EventCache eventCache; - void RequestCache::Reset() { *this = RequestCache(); } diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 264f2558a..7e2dc7ea6 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -33,15 +33,13 @@ std::string GetSystemEnvVariable(const std::string& env_key) { return env_value; } -std::unordered_map laravelEnv; -bool laravelEnvLoaded = false; bool LoadLaravelEnvFile() { - if (laravelEnvLoaded) { + if (AIKIDO_GLOBAL(laravelEnvLoaded)) { return true; } - std::string docRoot = server.GetVar("DOCUMENT_ROOT"); + std::string docRoot = AIKIDO_GLOBAL(server).GetVar("DOCUMENT_ROOT"); AIKIDO_LOG_DEBUG("Trying to load .env file, starting with DOCUMENT_ROOT: %s\n", docRoot.c_str()); if (docRoot.empty()) { AIKIDO_LOG_DEBUG("DOCUMENT_ROOT is empty!\n"); @@ -89,23 +87,23 @@ bool LoadLaravelEnvFile() { (value.front() == '\'' && value.back() == '\''))) { value = value.substr(1, value.length() - 2); } - laravelEnv[key] = value; + AIKIDO_GLOBAL(laravelEnv)[key] = value; } } } - laravelEnvLoaded = true; + AIKIDO_GLOBAL(laravelEnvLoaded) = true; AIKIDO_LOG_DEBUG("Loaded Laravel env file: %s\n", laravelEnvPath.c_str()); return true; } std::string GetLaravelEnvVariable(const std::string& env_key) { - if (laravelEnv.find(env_key) != laravelEnv.end()) { + if (AIKIDO_GLOBAL(laravelEnv).find(env_key) != AIKIDO_GLOBAL(laravelEnv).end()) { if (env_key == "AIKIDO_TOKEN") { - AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), AnonymizeToken(laravelEnv[env_key]).c_str()); + AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), AnonymizeToken(AIKIDO_GLOBAL(laravelEnv)[env_key]).c_str()); } else { - AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), laravelEnv[env_key].c_str()); + AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), AIKIDO_GLOBAL(laravelEnv)[env_key].c_str()); } - return laravelEnv[env_key]; + return AIKIDO_GLOBAL(laravelEnv)[env_key]; } return ""; } diff --git a/lib/php-extension/GoWrappers.cpp b/lib/php-extension/GoWrappers.cpp index 23dd1d984..71d0a5dfc 100644 --- a/lib/php-extension/GoWrappers.cpp +++ b/lib/php-extension/GoWrappers.cpp @@ -1,11 +1,13 @@ #include "Includes.h" GoString GoCreateString(const std::string& s) { - return GoString{s.c_str(), s.length()}; + return GoString{ s.c_str(), static_cast(s.size()) }; } GoSlice GoCreateSlice(const std::vector& v) { - return GoSlice{ (void*)v.data(), v.size(), v.capacity() }; + return GoSlice{ static_cast(const_cast(v.data())), + static_cast(v.size()), + static_cast(v.capacity()) }; } /* Callback wrapper called by the RequestProcessor (GO) whenever it needs data 
from PHP (C++ extension). @@ -18,79 +20,79 @@ char* GoContextCallback(int callbackId) { switch (callbackId) { case CONTEXT_REMOTE_ADDRESS: ctx = "REMOTE_ADDRESS"; - ret = server.GetVar("REMOTE_ADDR"); + ret = AIKIDO_GLOBAL(server).GetVar("REMOTE_ADDR"); break; case CONTEXT_METHOD: ctx = "METHOD"; - ret = server.GetVar("REQUEST_METHOD"); + ret = AIKIDO_GLOBAL(server).GetVar("REQUEST_METHOD"); break; case CONTEXT_ROUTE: ctx = "ROUTE"; - ret = server.GetRoute(); + ret = AIKIDO_GLOBAL(server).GetRoute(); break; case CONTEXT_STATUS_CODE: ctx = "STATUS_CODE"; - ret = server.GetStatusCode(); + ret = AIKIDO_GLOBAL(server).GetStatusCode(); break; case CONTEXT_BODY: ctx = "BODY"; - ret = server.GetBody(); + ret = AIKIDO_GLOBAL(server).GetBody(); break; case CONTEXT_HEADER_X_FORWARDED_FOR: ctx = "HEADER_X_FORWARDED_FOR"; - ret = server.GetVar("HTTP_X_FORWARDED_FOR"); + ret = AIKIDO_GLOBAL(server).GetVar("HTTP_X_FORWARDED_FOR"); break; case CONTEXT_COOKIES: ctx = "COOKIES"; - ret = server.GetVar("HTTP_COOKIE"); + ret = AIKIDO_GLOBAL(server).GetVar("HTTP_COOKIE"); break; case CONTEXT_QUERY: ctx = "QUERY"; - ret = server.GetQuery(); + ret = AIKIDO_GLOBAL(server).GetQuery(); break; case CONTEXT_HTTPS: ctx = "HTTPS"; - ret = server.GetVar("HTTPS"); + ret = AIKIDO_GLOBAL(server).GetVar("HTTPS"); break; case CONTEXT_URL: ctx = "URL"; - ret = server.GetUrl(); + ret = AIKIDO_GLOBAL(server).GetUrl(); break; case CONTEXT_HEADERS: ctx = "HEADERS"; - ret = server.GetHeaders(); + ret = AIKIDO_GLOBAL(server).GetHeaders(); break; case CONTEXT_HEADER_USER_AGENT: ctx = "USER_AGENT"; - ret = server.GetVar("HTTP_USER_AGENT"); + ret = AIKIDO_GLOBAL(server).GetVar("HTTP_USER_AGENT"); break; case CONTEXT_USER_ID: ctx = "USER_ID"; - ret = requestCache.userId; + ret = AIKIDO_GLOBAL(requestCache).userId; break; case CONTEXT_USER_NAME: ctx = "USER_NAME"; - ret = requestCache.userName; + ret = AIKIDO_GLOBAL(requestCache).userName; break; case CONTEXT_RATE_LIMIT_GROUP: ctx = "RATE_LIMIT_GROUP"; - ret = requestCache.rateLimitGroup; + ret = AIKIDO_GLOBAL(requestCache).rateLimitGroup; break; case FUNCTION_NAME: ctx = "FUNCTION_NAME"; - ret = eventCache.functionName; + ret = AIKIDO_GLOBAL(eventCache).functionName; break; case OUTGOING_REQUEST_URL: ctx = "OUTGOING_REQUEST_URL"; - ret = eventCache.outgoingRequestUrl; + ret = AIKIDO_GLOBAL(eventCache).outgoingRequestUrl; break; case OUTGOING_REQUEST_EFFECTIVE_URL: ctx = "OUTGOING_REQUEST_EFFECTIVE_URL"; - ret = eventCache.outgoingRequestEffectiveUrl; + ret = AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl; break; case OUTGOING_REQUEST_PORT: ctx = "OUTGOING_REQUEST_PORT"; - ret = eventCache.outgoingRequestPort; + ret = AIKIDO_GLOBAL(eventCache).outgoingRequestPort; break; case OUTGOING_REQUEST_EFFECTIVE_URL_PORT: ctx = "OUTGOING_REQUEST_EFFECTIVE_URL_PORT"; @@ -98,31 +100,31 @@ char* GoContextCallback(int callbackId) { break; case OUTGOING_REQUEST_RESOLVED_IP: ctx = "OUTGOING_REQUEST_RESOLVED_IP"; - ret = eventCache.outgoingRequestResolvedIp; + ret = AIKIDO_GLOBAL(eventCache).outgoingRequestResolvedIp; break; case CMD: ctx = "CMD"; - ret = eventCache.cmd; + ret = AIKIDO_GLOBAL(eventCache).cmd; break; case FILENAME: ctx = "FILENAME"; - ret = eventCache.filename; + ret = AIKIDO_GLOBAL(eventCache).filename; break; case FILENAME2: ctx = "FILENAME2"; - ret = eventCache.filename2; + ret = AIKIDO_GLOBAL(eventCache).filename2; break; case SQL_QUERY: ctx = "SQL_QUERY"; - ret = eventCache.sqlQuery; + ret = AIKIDO_GLOBAL(eventCache).sqlQuery; break; case SQL_DIALECT: ctx = "SQL_DIALECT"; - 
ret = eventCache.sqlDialect; + ret = AIKIDO_GLOBAL(eventCache).sqlDialect; break; case MODULE: ctx = "MODULE"; - ret = eventCache.moduleName; + ret = AIKIDO_GLOBAL(eventCache).moduleName; break; case STACK_TRACE: ctx = "STACK_TRACE"; diff --git a/lib/php-extension/Handle.cpp b/lib/php-extension/Handle.cpp index c01f1b34e..e91783f3d 100644 --- a/lib/php-extension/Handle.cpp +++ b/lib/php-extension/Handle.cpp @@ -7,19 +7,19 @@ ACTION_STATUS aikido_process_event(EVENT_ID& eventId, std::string& sink) { } std::string outputEvent; - requestProcessor.SendEvent(eventId, outputEvent); + AIKIDO_GLOBAL(requestProcessor).SendEvent(eventId, outputEvent); - if (action.IsDetection(outputEvent)) { - stats[sink].IncrementAttacksDetected(); + if (AIKIDO_GLOBAL(action).IsDetection(outputEvent)) { + AIKIDO_GLOBAL(stats)[sink].IncrementAttacksDetected(); } - if (!requestProcessor.IsBlockingEnabled()) { + if (!AIKIDO_GLOBAL(requestProcessor).IsBlockingEnabled()) { return CONTINUE; } - ACTION_STATUS action_status = action.Execute(outputEvent); + ACTION_STATUS action_status = AIKIDO_GLOBAL(action).Execute(outputEvent); if (action_status == BLOCK) { - stats[sink].IncrementAttacksBlocked(); + AIKIDO_GLOBAL(stats)[sink].IncrementAttacksBlocked(); } return action_status; } @@ -36,8 +36,8 @@ ZEND_NAMED_FUNCTION(aikido_generic_handler) { std::string outputEvent; bool caughtException = false; - eventCache.Reset(); - eventCache.functionName = ZSTR_VAL(execute_data->func->common.function_name); + AIKIDO_GLOBAL(eventCache).Reset(); + AIKIDO_GLOBAL(eventCache).functionName = ZSTR_VAL(execute_data->func->common.function_name); try { zend_execute_data* exec_data = EG(current_execute_data); diff --git a/lib/php-extension/HandleFileCompilation.cpp b/lib/php-extension/HandleFileCompilation.cpp index c8faad1a3..a47d15c84 100644 --- a/lib/php-extension/HandleFileCompilation.cpp +++ b/lib/php-extension/HandleFileCompilation.cpp @@ -1,34 +1,34 @@ #include "Includes.h" zend_op_array* handle_file_compilation(zend_file_handle* file_handle, int type) { - eventCache.Reset(); + AIKIDO_GLOBAL(eventCache).Reset(); switch (type) { case ZEND_INCLUDE: - eventCache.functionName = "include"; + AIKIDO_GLOBAL(eventCache).functionName = "include"; break; case ZEND_INCLUDE_ONCE: - eventCache.functionName = "include_once"; + AIKIDO_GLOBAL(eventCache).functionName = "include_once"; break; case ZEND_REQUIRE: - eventCache.functionName = "require"; + AIKIDO_GLOBAL(eventCache).functionName = "require"; break; case ZEND_REQUIRE_ONCE: - eventCache.functionName = "require_once"; + AIKIDO_GLOBAL(eventCache).functionName = "require_once"; break; default: return original_file_compilation_handler(file_handle, type); } - ScopedTimer scopedTimer(eventCache.functionName, "fs_op"); + ScopedTimer scopedTimer(AIKIDO_GLOBAL(eventCache).functionName, "fs_op"); char* filename = PHP_GET_CHAR_PTR(file_handle->filename); - AIKIDO_LOG_DEBUG("\"%s\" called for \"%s\"!\n", eventCache.functionName.c_str(), filename); + AIKIDO_LOG_DEBUG("\"%s\" called for \"%s\"!\n", AIKIDO_GLOBAL(eventCache).functionName.c_str(), filename); EVENT_ID eventId = NO_EVENT_ID; helper_handle_pre_file_path_access(filename, eventId); - if (aikido_process_event(eventId, eventCache.functionName) == BLOCK) { + if (aikido_process_event(eventId, AIKIDO_GLOBAL(eventCache).functionName) == BLOCK) { // exit zend_compile_file handler and do not call the original handler, thus blocking the script file compilation return nullptr; } @@ -39,7 +39,7 @@ zend_op_array* handle_file_compilation(zend_file_handle* 
file_handle, int type) eventId = NO_EVENT_ID; helper_handle_post_file_path_access(eventId); - aikido_process_event(eventId, eventCache.functionName); + aikido_process_event(eventId, AIKIDO_GLOBAL(eventCache).functionName); return op_array; } diff --git a/lib/php-extension/HandlePathAccess.cpp b/lib/php-extension/HandlePathAccess.cpp index f621ff97c..f84ee28fa 100644 --- a/lib/php-extension/HandlePathAccess.cpp +++ b/lib/php-extension/HandlePathAccess.cpp @@ -30,23 +30,23 @@ void helper_handle_pre_file_path_access(char *filename, EVENT_ID &eventId) { if (StartsWith(filenameString, "http://", false) || StartsWith(filenameString, "https://", false)) { eventId = EVENT_PRE_OUTGOING_REQUEST; - eventCache.outgoingRequestUrl = filenameString; + AIKIDO_GLOBAL(eventCache).outgoingRequestUrl = filenameString; } else { eventId = EVENT_PRE_PATH_ACCESSED; - eventCache.filename = filenameString; + AIKIDO_GLOBAL(eventCache).filename = filenameString; } } /* Helper for handle post file path access */ void helper_handle_post_file_path_access(EVENT_ID &eventId) { - if (!eventCache.outgoingRequestUrl.empty()) { + if (!AIKIDO_GLOBAL(eventCache).outgoingRequestUrl.empty()) { // If the pre handler for path access determined this was actually an URL, // we need to notify that the request finished. eventId = EVENT_POST_OUTGOING_REQUEST; // As we cannot extract the effective URL for these fopen wrappers, // we will just assume it's the same as the initial URL. - eventCache.outgoingRequestEffectiveUrl = eventCache.outgoingRequestUrl; + AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl = AIKIDO_GLOBAL(eventCache).outgoingRequestUrl; } } @@ -92,7 +92,7 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_file_path_access_2) { helper_handle_pre_file_path_access(ZSTR_VAL(filename), eventId); if (filename2) { - eventCache.filename2 = ZSTR_VAL(filename2); + AIKIDO_GLOBAL(eventCache).filename2 = ZSTR_VAL(filename2); } } diff --git a/lib/php-extension/HandleQueries.cpp b/lib/php-extension/HandleQueries.cpp index a0ddfbb3c..d3fb9b1c8 100644 --- a/lib/php-extension/HandleQueries.cpp +++ b/lib/php-extension/HandleQueries.cpp @@ -24,9 +24,9 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_pdo_query) { } eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - eventCache.moduleName = "PDO"; - eventCache.sqlQuery = ZSTR_VAL(query); - eventCache.sqlDialect = GetSqlDialectFromPdo(pdo_object); + AIKIDO_GLOBAL(eventCache).moduleName = "PDO"; + AIKIDO_GLOBAL(eventCache).sqlQuery = ZSTR_VAL(query); + AIKIDO_GLOBAL(eventCache).sqlDialect = GetSqlDialectFromPdo(pdo_object); } AIKIDO_HANDLER_FUNCTION(handle_pre_pdo_exec) { @@ -47,9 +47,9 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_pdo_exec) { } eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - eventCache.moduleName = "PDO"; - eventCache.sqlQuery = ZSTR_VAL(query); - eventCache.sqlDialect = GetSqlDialectFromPdo(pdo_object); + AIKIDO_GLOBAL(eventCache).moduleName = "PDO"; + AIKIDO_GLOBAL(eventCache).sqlQuery = ZSTR_VAL(query); + AIKIDO_GLOBAL(eventCache).sqlDialect = GetSqlDialectFromPdo(pdo_object); } AIKIDO_HANDLER_FUNCTION(handle_pre_pdostatement_execute) { @@ -66,11 +66,11 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_pdostatement_execute) { } eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - eventCache.moduleName = "PDOStatement"; - eventCache.sqlQuery = PHP_GET_CHAR_PTR(stmt->query_string); + AIKIDO_GLOBAL(eventCache).moduleName = "PDOStatement"; + AIKIDO_GLOBAL(eventCache).sqlQuery = PHP_GET_CHAR_PTR(stmt->query_string); zval *pdo_object = &stmt->database_object_handle; - eventCache.sqlDialect = GetSqlDialectFromPdo(pdo_object); + 
AIKIDO_GLOBAL(eventCache).sqlDialect = GetSqlDialectFromPdo(pdo_object); } zend_class_entry* helper_load_mysqli_link_class_entry() { @@ -109,7 +109,7 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_mysqli_query){ scopedTimer.SetSink(sink, "sql_op"); eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - eventCache.moduleName = "mysqli"; - eventCache.sqlQuery = query; - eventCache.sqlDialect = "mysql"; + AIKIDO_GLOBAL(eventCache).moduleName = "mysqli"; + AIKIDO_GLOBAL(eventCache).sqlQuery = query; + AIKIDO_GLOBAL(eventCache).sqlDialect = "mysql"; } diff --git a/lib/php-extension/HandleRateLimitGroup.cpp b/lib/php-extension/HandleRateLimitGroup.cpp index 9a6dc152b..c57661aa5 100644 --- a/lib/php-extension/HandleRateLimitGroup.cpp +++ b/lib/php-extension/HandleRateLimitGroup.cpp @@ -17,11 +17,11 @@ ZEND_FUNCTION(set_rate_limit_group) { RETURN_BOOL(false); } - requestCache.rateLimitGroup = std::string(group, groupLength); + AIKIDO_GLOBAL(requestCache).rateLimitGroup = std::string(group, groupLength); std::string outputEvent; - requestProcessor.SendEvent(EVENT_SET_RATE_LIMIT_GROUP, outputEvent); - AIKIDO_LOG_DEBUG("Set rate limit group to %s\n", requestCache.rateLimitGroup.c_str()); + AIKIDO_GLOBAL(requestProcessor).SendEvent(EVENT_SET_RATE_LIMIT_GROUP, outputEvent); + AIKIDO_LOG_DEBUG("Set rate limit group to %s\n", AIKIDO_GLOBAL(requestCache).rateLimitGroup.c_str()); RETURN_BOOL(true); } \ No newline at end of file diff --git a/lib/php-extension/HandleSetToken.cpp b/lib/php-extension/HandleSetToken.cpp index 802903e94..c59105b49 100644 --- a/lib/php-extension/HandleSetToken.cpp +++ b/lib/php-extension/HandleSetToken.cpp @@ -19,6 +19,6 @@ ZEND_FUNCTION(set_token) { RETURN_BOOL(false); } - requestProcessor.LoadConfigWithTokenFromPHPSetToken(std::string(token, tokenLength)); + AIKIDO_GLOBAL(requestProcessor).LoadConfigWithTokenFromPHPSetToken(std::string(token, tokenLength)); RETURN_BOOL(true); } diff --git a/lib/php-extension/HandleShellExecution.cpp b/lib/php-extension/HandleShellExecution.cpp index e780820ba..e6023b6b3 100644 --- a/lib/php-extension/HandleShellExecution.cpp +++ b/lib/php-extension/HandleShellExecution.cpp @@ -1,7 +1,7 @@ #include "Includes.h" void helper_handle_pre_shell_execution(std::string cmd, EVENT_ID &eventId) { - eventCache.cmd = cmd; + AIKIDO_GLOBAL(eventCache).cmd = cmd; eventId = EVENT_PRE_SHELL_EXECUTED; } diff --git a/lib/php-extension/HandleShouldBlockRequest.cpp b/lib/php-extension/HandleShouldBlockRequest.cpp index 74ab78cb9..200dec9c9 100644 --- a/lib/php-extension/HandleShouldBlockRequest.cpp +++ b/lib/php-extension/HandleShouldBlockRequest.cpp @@ -2,13 +2,13 @@ zend_class_entry *blockingStatusClass = nullptr; -// This variable is used to check if auto_block_request function has already been called, -// in order to avoid multiple calls to this function. -bool checkedAutoBlock = false; +// The checkedAutoBlock module global variable is used to check if auto_block_request function +// has already been called, in order to avoid multiple calls to this function. +// Accessed via AIKIDO_GLOBAL(checkedAutoBlock). -// This variable is used to check if should_block_request function has already been called, -// in order to avoid multiple calls to this function. -bool checkedShouldBlockRequest = false; +// The checkedShouldBlockRequest module global variable is used to check if should_block_request +// function has already been called, in order to avoid multiple calls to this function. +// Accessed via AIKIDO_GLOBAL(checkedShouldBlockRequest). 
bool CheckBlocking(EVENT_ID eventId, bool& checkedBlocking) { if (checkedBlocking) { @@ -19,8 +19,8 @@ bool CheckBlocking(EVENT_ID eventId, bool& checkedBlocking) { try { std::string output; - requestProcessor.SendEvent(eventId, output); - action.Execute(output); + AIKIDO_GLOBAL(requestProcessor).SendEvent(eventId, output); + AIKIDO_GLOBAL(action).Execute(output); checkedBlocking = true; return true; } catch (const std::exception &e) { @@ -43,7 +43,7 @@ ZEND_FUNCTION(should_block_request) { return; } - if (!CheckBlocking(EVENT_GET_BLOCKING_STATUS, checkedShouldBlockRequest)) { + if (!CheckBlocking(EVENT_GET_BLOCKING_STATUS, AIKIDO_GLOBAL(checkedShouldBlockRequest))) { return; } @@ -56,12 +56,12 @@ ZEND_FUNCTION(should_block_request) { #else zval *obj = return_value; #endif - zend_update_property_bool(blockingStatusClass, obj, "block", sizeof("block") - 1, action.Block()); - zend_update_property_string(blockingStatusClass, obj, "type", sizeof("type") - 1, action.Type()); - zend_update_property_string(blockingStatusClass, obj, "trigger", sizeof("trigger") - 1, action.Trigger()); - zend_update_property_string(blockingStatusClass, obj, "description", sizeof("description") - 1, action.Description()); - zend_update_property_string(blockingStatusClass, obj, "ip", sizeof("ip") - 1, action.Ip()); - zend_update_property_string(blockingStatusClass, obj, "user_agent", sizeof("user_agent") - 1, action.UserAgent()); + zend_update_property_bool(blockingStatusClass, obj, "block", sizeof("block") - 1, AIKIDO_GLOBAL(action).Block()); + zend_update_property_string(blockingStatusClass, obj, "type", sizeof("type") - 1, AIKIDO_GLOBAL(action).Type()); + zend_update_property_string(blockingStatusClass, obj, "trigger", sizeof("trigger") - 1, AIKIDO_GLOBAL(action).Trigger()); + zend_update_property_string(blockingStatusClass, obj, "description", sizeof("description") - 1, AIKIDO_GLOBAL(action).Description()); + zend_update_property_string(blockingStatusClass, obj, "ip", sizeof("ip") - 1, AIKIDO_GLOBAL(action).Ip()); + zend_update_property_string(blockingStatusClass, obj, "user_agent", sizeof("user_agent") - 1, AIKIDO_GLOBAL(action).UserAgent()); } ZEND_FUNCTION(auto_block_request) { @@ -74,7 +74,7 @@ ZEND_FUNCTION(auto_block_request) { return; } - CheckBlocking(EVENT_GET_AUTO_BLOCKING_STATUS, checkedAutoBlock); + CheckBlocking(EVENT_GET_AUTO_BLOCKING_STATUS, AIKIDO_GLOBAL(checkedAutoBlock)); } void RegisterAikidoBlockRequestStatusClass() { diff --git a/lib/php-extension/HandleUrls.cpp b/lib/php-extension/HandleUrls.cpp index 977d91d95..53bf2b88a 100644 --- a/lib/php-extension/HandleUrls.cpp +++ b/lib/php-extension/HandleUrls.cpp @@ -13,31 +13,31 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_curl_exec) { #endif ZEND_PARSE_PARAMETERS_END(); - eventCache.outgoingRequestUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); - eventCache.outgoingRequestPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); + AIKIDO_GLOBAL(eventCache).outgoingRequestUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); + AIKIDO_GLOBAL(eventCache).outgoingRequestPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); // if requestCache.outgoingRequestUrl is not empty, we check if it's a redirect - if (!requestCache.outgoingRequestUrl.empty()) { - json outgoingRequestUrlJson = CallPhpFunctionParseUrl(eventCache.outgoingRequestUrl); - json outgoingRequestRedirectUrlJson = CallPhpFunctionParseUrl(requestCache.outgoingRequestRedirectUrl); + if 
(!AIKIDO_GLOBAL(requestCache).outgoingRequestUrl.empty()) { + json outgoingRequestUrlJson = CallPhpFunctionParseUrl(AIKIDO_GLOBAL(eventCache).outgoingRequestUrl); + json outgoingRequestRedirectUrlJson = CallPhpFunctionParseUrl(AIKIDO_GLOBAL(requestCache).outgoingRequestRedirectUrl); // if the host and port are the same, we use the initial URL, otherwise we use the effective URL if (!outgoingRequestUrlJson.empty() && !outgoingRequestRedirectUrlJson.empty() && outgoingRequestUrlJson["host"] == outgoingRequestRedirectUrlJson["host"] && outgoingRequestUrlJson["port"] == outgoingRequestRedirectUrlJson["port"]) { - eventCache.outgoingRequestUrl = requestCache.outgoingRequestUrl; + AIKIDO_GLOBAL(eventCache).outgoingRequestUrl = AIKIDO_GLOBAL(requestCache).outgoingRequestUrl; } else { // if previous outgoingRequestRedirectUrl it's different from outgoingRequestUrl it means that it's a new request // so we reset the outgoingRequestUrl - requestCache.outgoingRequestUrl = ""; + AIKIDO_GLOBAL(requestCache).outgoingRequestUrl = ""; } } - if (eventCache.outgoingRequestUrl.empty()) return; + if (AIKIDO_GLOBAL(eventCache).outgoingRequestUrl.empty()) return; eventId = EVENT_PRE_OUTGOING_REQUEST; - eventCache.moduleName = "curl"; + AIKIDO_GLOBAL(eventCache).moduleName = "curl"; } AIKIDO_HANDLER_FUNCTION(handle_post_curl_exec) { @@ -56,24 +56,24 @@ AIKIDO_HANDLER_FUNCTION(handle_post_curl_exec) { eventId = EVENT_POST_OUTGOING_REQUEST; - eventCache.moduleName = "curl"; - eventCache.outgoingRequestEffectiveUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); - eventCache.outgoingRequestEffectiveUrlPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); - eventCache.outgoingRequestResolvedIp = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_IP); + AIKIDO_GLOBAL(eventCache).moduleName = "curl"; + AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); + AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrlPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); + AIKIDO_GLOBAL(eventCache).outgoingRequestResolvedIp = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_IP); std::string outgoingRequestResponseCode = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_RESPONSE_CODE); // if outgoingRequestResponseCode starts with 3, it's a redirect if (!outgoingRequestResponseCode.empty() && outgoingRequestResponseCode.substr(0, 1) == "3") { - requestCache.outgoingRequestRedirectUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_REDIRECT_URL); + AIKIDO_GLOBAL(requestCache).outgoingRequestRedirectUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_REDIRECT_URL); // if it's the first redirect - if (requestCache.outgoingRequestUrl.empty()) { - requestCache.outgoingRequestUrl = eventCache.outgoingRequestEffectiveUrl; + if (AIKIDO_GLOBAL(requestCache).outgoingRequestUrl.empty()) { + AIKIDO_GLOBAL(requestCache).outgoingRequestUrl = AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl; } } else { - requestCache.outgoingRequestUrl = ""; - requestCache.outgoingRequestRedirectUrl = ""; + AIKIDO_GLOBAL(requestCache).outgoingRequestUrl = ""; + AIKIDO_GLOBAL(requestCache).outgoingRequestRedirectUrl = ""; } } diff --git a/lib/php-extension/HandleUsers.cpp b/lib/php-extension/HandleUsers.cpp index 47f535618..28b7e4dc4 100644 --- a/lib/php-extension/HandleUsers.cpp +++ b/lib/php-extension/HandleUsers.cpp @@ -1,13 +1,13 @@ #include "Includes.h" bool SendUserEvent(std::string id, std::string username) { - 
requestCache.userId = id; - requestCache.userName = username; + AIKIDO_GLOBAL(requestCache).userId = id; + AIKIDO_GLOBAL(requestCache).userName = username; try { std::string output; - requestProcessor.SendEvent(EVENT_SET_USER, output); - action.Execute(output); + AIKIDO_GLOBAL(requestProcessor).SendEvent(EVENT_SET_USER, output); + AIKIDO_GLOBAL(action).Execute(output); return true; } catch (const std::exception &e) { AIKIDO_LOG_ERROR("Exception encountered in processing user event: %s\n", e.what()); diff --git a/lib/php-extension/HookAst.cpp b/lib/php-extension/HookAst.cpp index c78815496..19f14d143 100644 --- a/lib/php-extension/HookAst.cpp +++ b/lib/php-extension/HookAst.cpp @@ -1,8 +1,5 @@ #include "Includes.h" -HashTable *global_ast_to_clean; -ZEND_API void (*original_ast_process)(zend_ast *ast) = nullptr; - /* This is a custom destructor, used for cleaning the allocated ast hashtable. This is needed because the ast hashtable is not cleaned by the zend_ast_process function. @@ -13,9 +10,9 @@ void ast_to_clean_dtor(zval *zv) { } void ensure_ast_hashtable_initialized() { - if (!global_ast_to_clean) { - ALLOC_HASHTABLE(global_ast_to_clean); - zend_hash_init(global_ast_to_clean, 8, NULL, ast_to_clean_dtor, 1); + if (!AIKIDO_GLOBAL(global_ast_to_clean)) { + ALLOC_HASHTABLE(AIKIDO_GLOBAL(global_ast_to_clean)); + zend_hash_init(AIKIDO_GLOBAL(global_ast_to_clean), 8, NULL, ast_to_clean_dtor, 1); } } @@ -31,14 +28,14 @@ zend_ast *create_ast_call(const char *name) { name_var->kind = ZEND_AST_ZVAL; ZVAL_STRING(&name_var->val, name); name_var->val.u2.lineno = 0; - zend_hash_next_index_insert_ptr(global_ast_to_clean, name_var); + zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), name_var); // Create empty argument list arg_list = (zend_ast_list*)emalloc(sizeof(zend_ast_list)); arg_list->kind = ZEND_AST_ARG_LIST; arg_list->lineno = 0; arg_list->children = 0; - zend_hash_next_index_insert_ptr(global_ast_to_clean, arg_list); + zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), arg_list); // Create function call node call = (zend_ast*)emalloc(sizeof(zend_ast) + sizeof(zend_ast*)); @@ -46,7 +43,7 @@ zend_ast *create_ast_call(const char *name) { call->lineno = 0; call->child[0] = (zend_ast*)name_var; call->child[1] = (zend_ast*)arg_list; - zend_hash_next_index_insert_ptr(global_ast_to_clean, call); + zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), call); return call; } @@ -108,7 +105,7 @@ void insert_call_to_ast(zend_ast *ast) { block->children = 2; block->child[0] = call; block->child[1] = stmt_list->child[insertion_point]; - zend_hash_next_index_insert_ptr(global_ast_to_clean, block); + zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), block); stmt_list->child[insertion_point] = (zend_ast*)block; } @@ -116,39 +113,39 @@ void insert_call_to_ast(zend_ast *ast) { void aikido_ast_process(zend_ast *ast) { insert_call_to_ast(ast); - if(original_ast_process){ - original_ast_process(ast); + if(AIKIDO_GLOBAL(original_ast_process)){ + AIKIDO_GLOBAL(original_ast_process)(ast); } } void HookAstProcess() { - if (original_ast_process) { - AIKIDO_LOG_WARN("\"zend_ast_process\" already hooked (original handler %p)!\n", original_ast_process); + if (AIKIDO_GLOBAL(original_ast_process)) { + AIKIDO_LOG_WARN("\"zend_ast_process\" already hooked (original handler %p)!\n", AIKIDO_GLOBAL(original_ast_process)); return; } - original_ast_process = zend_ast_process; + AIKIDO_GLOBAL(original_ast_process) = zend_ast_process; zend_ast_process = 
aikido_ast_process; - AIKIDO_LOG_INFO("Hooked \"zend_ast_process\" (original handler %p)!\n", original_ast_process); + AIKIDO_LOG_INFO("Hooked \"zend_ast_process\" (original handler %p)!\n", AIKIDO_GLOBAL(original_ast_process)); } void UnhookAstProcess() { - AIKIDO_LOG_INFO("Unhooked \"zend_ast_process\" (original handler %p)!\n", original_ast_process); + AIKIDO_LOG_INFO("Unhooked \"zend_ast_process\" (original handler %p)!\n", AIKIDO_GLOBAL(original_ast_process)); // As it's not mandatory to have a zend_ast_process installed, we need to ensure UnhookAstProcess() restores zend_ast_process even if the original was NULL // Only unhook if the current handler is still ours, avoiding clobbering others if (zend_ast_process == aikido_ast_process){ - zend_ast_process = original_ast_process; + zend_ast_process = AIKIDO_GLOBAL(original_ast_process); } - original_ast_process = nullptr; + AIKIDO_GLOBAL(original_ast_process) = nullptr; } void DestroyAstToClean() { - if (global_ast_to_clean) { - zend_hash_destroy(global_ast_to_clean); - FREE_HASHTABLE(global_ast_to_clean); - global_ast_to_clean = nullptr; + if (AIKIDO_GLOBAL(global_ast_to_clean)) { + zend_hash_destroy(AIKIDO_GLOBAL(global_ast_to_clean)); + FREE_HASHTABLE(AIKIDO_GLOBAL(global_ast_to_clean)); + AIKIDO_GLOBAL(global_ast_to_clean) = nullptr; } } \ No newline at end of file diff --git a/lib/php-extension/Log.cpp b/lib/php-extension/Log.cpp index be4c04209..2400bfbf9 100644 --- a/lib/php-extension/Log.cpp +++ b/lib/php-extension/Log.cpp @@ -37,7 +37,7 @@ void Log::Write(AIKIDO_LOG_LEVEL level, const char* format, ...) { return; } - fprintf(logFile, "[AIKIDO][%s][%d][%s] ", ToString(level).c_str(), getpid(), GetTime().c_str()); + fprintf(logFile, "[AIKIDO][%s][%d][%ld][%s] ", ToString(level).c_str(), getpid(), (long)GetThreadID(), GetTime().c_str()); va_list args; va_start(args, format); diff --git a/lib/php-extension/Packages.cpp b/lib/php-extension/Packages.cpp index b1e0386ee..3fa2e98df 100644 --- a/lib/php-extension/Packages.cpp +++ b/lib/php-extension/Packages.cpp @@ -58,7 +58,7 @@ std::string GetComposerPackageVersion(const std::string& version) { unordered_map GetComposerPackages() { unordered_map packages; - std::string docRoot = server.GetVar("DOCUMENT_ROOT"); + std::string docRoot = AIKIDO_GLOBAL(server).GetVar("DOCUMENT_ROOT"); if (docRoot.empty()) { return packages; } diff --git a/lib/php-extension/PhpLifecycle.cpp b/lib/php-extension/PhpLifecycle.cpp index 471bd6f85..ca814f3cf 100644 --- a/lib/php-extension/PhpLifecycle.cpp +++ b/lib/php-extension/PhpLifecycle.cpp @@ -11,15 +11,15 @@ void PhpLifecycle::ModuleInit() { } void PhpLifecycle::RequestInit() { - action.Reset(); - requestCache.Reset(); - requestProcessor.RequestInit(); - checkedAutoBlock = false; - checkedShouldBlockRequest = false; + AIKIDO_GLOBAL(action).Reset(); + AIKIDO_GLOBAL(requestCache).Reset(); + AIKIDO_GLOBAL(requestProcessor).RequestInit(); + AIKIDO_GLOBAL(checkedAutoBlock) = false; + AIKIDO_GLOBAL(checkedShouldBlockRequest) = false; } void PhpLifecycle::RequestShutdown() { - requestProcessor.RequestShutdown(); + AIKIDO_GLOBAL(requestProcessor).RequestShutdown(); } void PhpLifecycle::ModuleShutdown() { @@ -31,7 +31,7 @@ void PhpLifecycle::ModuleShutdown() { UnhookAll(); } else { AIKIDO_LOG_INFO("Module shutdown NOT called on main PID. 
Uninitializing Aikido Request Processor...\n"); - requestProcessor.Uninit(); + AIKIDO_GLOBAL(requestProcessor).Uninit(); } } @@ -48,5 +48,3 @@ void PhpLifecycle::UnhookAll() { UnhookFileCompilation(); UnhookAstProcess(); } - -PhpLifecycle phpLifecycle; \ No newline at end of file diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp index 6c8218bcb..9beac12f1 100644 --- a/lib/php-extension/RequestProcessor.cpp +++ b/lib/php-extension/RequestProcessor.cpp @@ -1,7 +1,5 @@ #include "Includes.h" -RequestProcessor requestProcessor; - std::string RequestProcessor::GetInitData(const std::string& token) { LoadLaravelEnvFile(); LoadEnvironment(); @@ -54,7 +52,7 @@ void RequestProcessor::SendPreRequestEvent() { try { std::string outputEvent; SendEvent(EVENT_PRE_REQUEST, outputEvent); - action.Execute(outputEvent); + AIKIDO_GLOBAL(action).Execute(outputEvent); } catch (const std::exception& e) { AIKIDO_LOG_ERROR("Exception encountered in processing request init metadata: %s\n", e.what()); } @@ -64,7 +62,7 @@ void RequestProcessor::SendPostRequestEvent() { try { std::string outputEvent; SendEvent(EVENT_POST_REQUEST, outputEvent); - action.Execute(outputEvent); + AIKIDO_GLOBAL(action).Execute(outputEvent); } catch (const std::exception& e) { AIKIDO_LOG_ERROR("Exception encountered in processing request shutdown metadata: %s\n", e.what()); } @@ -88,11 +86,22 @@ bool RequestProcessor::IsBlockingEnabled() { bool RequestProcessor::ReportStats() { AIKIDO_LOG_INFO("Reporting stats to Aikido Request Processor...\n"); - for (const auto& [sink, sinkStats] : stats) { + for (std::unordered_map::const_iterator it = AIKIDO_GLOBAL(stats).begin(); it != AIKIDO_GLOBAL(stats).end(); ++it) { + const std::string& sink = it->first; + const SinkStats& sinkStats = it->second; AIKIDO_LOG_INFO("Reporting stats for sink \"%s\" to Aikido Request Processor...\n", sink.c_str()); - requestProcessorReportStatsFn(GoCreateString(sink), GoCreateString(sinkStats.kind), sinkStats.attacksDetected, sinkStats.attacksBlocked, sinkStats.interceptorThrewError, sinkStats.withoutContext, sinkStats.timings.size(), GoCreateSlice(sinkStats.timings)); - } - stats.clear(); + requestProcessorReportStatsFn( + GoCreateString(sink), + GoCreateString(sinkStats.kind), + sinkStats.attacksDetected, + sinkStats.attacksBlocked, + sinkStats.interceptorThrewError, + sinkStats.withoutContext, + static_cast(sinkStats.timings.size()), + GoCreateSlice(sinkStats.timings) + ); + } + AIKIDO_GLOBAL(stats).clear(); return true; } @@ -180,7 +189,7 @@ bool RequestProcessor::RequestInit() { SendPreRequestEvent(); if ((this->numberOfRequests % AIKIDO_GLOBAL(report_stats_interval_to_agent)) == 0) { - requestProcessor.ReportStats(); + AIKIDO_GLOBAL(requestProcessor).ReportStats(); } return true; } diff --git a/lib/php-extension/Server.cpp b/lib/php-extension/Server.cpp index b58462995..6f8bc7a17 100644 --- a/lib/php-extension/Server.cpp +++ b/lib/php-extension/Server.cpp @@ -6,8 +6,6 @@ return ""; \ } -Server server; - /* Always load the current "_SERVER" variable from PHP, so we make sure it's always available and it's the correct one */ zval* Server::GetServerVar() { @@ -128,8 +126,8 @@ std::string Server::GetHeaders() { ZEND_HASH_FOREACH_END(); json headers_json; - for (auto const& [key, val] : headers) { - headers_json[key] = val; + for (std::map::const_iterator it = headers.begin(); it != headers.end(); ++it) { + headers_json[it->first] = it->second; } return NormalizeAndDumpJson(headers_json); } diff --git 
a/lib/php-extension/Stats.cpp b/lib/php-extension/Stats.cpp index 67647f291..986f68886 100644 --- a/lib/php-extension/Stats.cpp +++ b/lib/php-extension/Stats.cpp @@ -1,29 +1,23 @@ #include "Includes.h" -std::unordered_map stats; - -std::chrono::high_resolution_clock::time_point currentRequestStart = std::chrono::high_resolution_clock::time_point{}; - -uint64_t totalOverheadForCurrentRequest = 0; - inline void AddToStats(const std::string& key, const std::string& kind, uint64_t duration) { - SinkStats& sinkStats = stats[key]; + SinkStats& sinkStats = AIKIDO_GLOBAL(stats)[key]; sinkStats.kind = kind; sinkStats.timings.push_back(duration); } inline void AddRequestTotalToStats() { - if (currentRequestStart == std::chrono::high_resolution_clock::time_point{}) { + if (AIKIDO_GLOBAL(currentRequestStart) == std::chrono::high_resolution_clock::time_point{}) { return; } - uint64_t totalOverhead = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - currentRequestStart).count(); + uint64_t totalOverhead = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - AIKIDO_GLOBAL(currentRequestStart)).count(); AddToStats("request_total", "request_op", totalOverhead); - currentRequestStart = std::chrono::high_resolution_clock::time_point{}; + AIKIDO_GLOBAL(currentRequestStart) = std::chrono::high_resolution_clock::time_point{}; } inline void AddRequestTotalOverheadToStats() { - AddToStats("request_total_overhead", "request_op", totalOverheadForCurrentRequest); - totalOverheadForCurrentRequest = 0; + AddToStats("request_total_overhead", "request_op", AIKIDO_GLOBAL(totalOverheadForCurrentRequest)); + AIKIDO_GLOBAL(totalOverheadForCurrentRequest) = 0; } ScopedTimer::ScopedTimer() { @@ -42,7 +36,7 @@ void ScopedTimer::SetSink(std::string key, std::string kind) { void ScopedTimer::Start() { this->start = std::chrono::high_resolution_clock::now(); if (this->key == "request_init") { - currentRequestStart = this->start; + AIKIDO_GLOBAL(currentRequestStart) = this->start; } } @@ -59,7 +53,7 @@ ScopedTimer::~ScopedTimer() { return; } this->Stop(); - totalOverheadForCurrentRequest += this->duration; + AIKIDO_GLOBAL(totalOverheadForCurrentRequest) += this->duration; if (key == "request_shutdown") { AddRequestTotalOverheadToStats(); AddRequestTotalToStats(); diff --git a/lib/php-extension/Utils.cpp b/lib/php-extension/Utils.cpp index e739ea447..2b87cc0b8 100644 --- a/lib/php-extension/Utils.cpp +++ b/lib/php-extension/Utils.cpp @@ -27,6 +27,13 @@ std::string GetDateTime() { return time_str; } +pid_t GetThreadID() { +#ifdef SYS_gettid + return syscall(SYS_gettid); +#else + return (pid_t)getpid(); // Fallback for non-Linux systems +#endif +} const char* GetEventName(EVENT_ID event) { switch (event) { case EVENT_PRE_REQUEST: diff --git a/lib/php-extension/include/Action.h b/lib/php-extension/include/Action.h index 4492873ed..a0f260230 100644 --- a/lib/php-extension/include/Action.h +++ b/lib/php-extension/include/Action.h @@ -41,5 +41,3 @@ class Action { char* Ip(); char* UserAgent(); }; - -extern Action action; diff --git a/lib/php-extension/include/Cache.h b/lib/php-extension/include/Cache.h index cef568710..135e63d2d 100644 --- a/lib/php-extension/include/Cache.h +++ b/lib/php-extension/include/Cache.h @@ -34,6 +34,3 @@ class EventCache { EventCache() = default; void Reset(); }; - -extern RequestCache requestCache; -extern EventCache eventCache; diff --git a/lib/php-extension/include/HookAst.h b/lib/php-extension/include/HookAst.h index df678c7c2..61ec5828f 100644 --- 
a/lib/php-extension/include/HookAst.h +++ b/lib/php-extension/include/HookAst.h @@ -1,12 +1,5 @@ #pragma once - -extern HashTable *global_ast_to_clean; -extern ZEND_API void (*original_ast_process)(zend_ast *ast); - -extern bool checkedAutoBlock; -extern bool checkedShouldBlockRequest; - void HookAstProcess(); void UnhookAstProcess(); void DestroyAstToClean(); diff --git a/lib/php-extension/include/Includes.h b/lib/php-extension/include/Includes.h index 0346e13de..5a10967b7 100644 --- a/lib/php-extension/include/Includes.h +++ b/lib/php-extension/include/Includes.h @@ -12,6 +12,7 @@ #include #include #include +#include #include #include @@ -39,18 +40,11 @@ using json = nlohmann::json; #include "GoWrappers.h" #include "../../API.h" -#include "Log.h" -#include "Agent.h" #include "php_aikido.h" #include "Environment.h" -#include "Action.h" -#include "Cache.h" #include "Stats.h" #include "Hooks.h" #include "PhpWrappers.h" -#include "Server.h" -#include "RequestProcessor.h" -#include "PhpLifecycle.h" #include "Packages.h" #include "Utils.h" diff --git a/lib/php-extension/include/PhpLifecycle.h b/lib/php-extension/include/PhpLifecycle.h index 6efcf49c4..78e8c3de0 100644 --- a/lib/php-extension/include/PhpLifecycle.h +++ b/lib/php-extension/include/PhpLifecycle.h @@ -20,5 +20,3 @@ class PhpLifecycle { void UnhookAll(); }; - -extern PhpLifecycle phpLifecycle; diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h index de3cd2763..f18addcd8 100644 --- a/lib/php-extension/include/RequestProcessor.h +++ b/lib/php-extension/include/RequestProcessor.h @@ -43,5 +43,3 @@ class RequestProcessor { ~RequestProcessor(); }; - -extern RequestProcessor requestProcessor; diff --git a/lib/php-extension/include/Server.h b/lib/php-extension/include/Server.h index 0b1a1070d..28d9e97d9 100644 --- a/lib/php-extension/include/Server.h +++ b/lib/php-extension/include/Server.h @@ -27,5 +27,3 @@ class Server { ~Server() = default; }; - -extern Server server; diff --git a/lib/php-extension/include/Stats.h b/lib/php-extension/include/Stats.h index 5835affc3..f12a21082 100644 --- a/lib/php-extension/include/Stats.h +++ b/lib/php-extension/include/Stats.h @@ -31,4 +31,3 @@ class SinkStats { void IncrementWithoutContext(); }; -extern std::unordered_map stats; diff --git a/lib/php-extension/include/Utils.h b/lib/php-extension/include/Utils.h index 851d495dd..808af192f 100644 --- a/lib/php-extension/include/Utils.h +++ b/lib/php-extension/include/Utils.h @@ -10,6 +10,8 @@ std::string GetTime(); std::string GetDateTime(); +pid_t GetThreadID(); + const char* GetEventName(EVENT_ID event); std::string NormalizeAndDumpJson(const json& jsonStr); diff --git a/lib/php-extension/include/php_aikido.h b/lib/php-extension/include/php_aikido.h index ff6d759bf..d99fe5d99 100644 --- a/lib/php-extension/include/php_aikido.h +++ b/lib/php-extension/include/php_aikido.h @@ -1,5 +1,21 @@ #pragma once +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include "php.h" +#include "Log.h" +#include "Agent.h" +#include "Server.h" +#include "RequestProcessor.h" +#include "Action.h" +#include "Cache.h" +#include "PhpLifecycle.h" +#include "Stats.h" + extern zend_module_entry aikido_module_entry; #define phpext_aikido_ptr &aikido_module_entry @@ -26,6 +42,21 @@ std::string endpoint; std::string config_endpoint; Log logger; Agent agent; +Server server; +RequestProcessor requestProcessor; +Action action; +RequestCache requestCache; +EventCache eventCache; +PhpLifecycle phpLifecycle; 
+std::unordered_map stats; +std::chrono::high_resolution_clock::time_point currentRequestStart; +uint64_t totalOverheadForCurrentRequest; +std::unordered_map laravelEnv; +bool laravelEnvLoaded; +bool checkedAutoBlock; +bool checkedShouldBlockRequest; +HashTable *global_ast_to_clean; +void (*original_ast_process)(zend_ast *ast); ZEND_END_MODULE_GLOBALS(aikido) ZEND_EXTERN_MODULE_GLOBALS(aikido) diff --git a/package/rpm/aikido.spec b/package/rpm/aikido.spec index ff55a46ad..bddf90245 100644 --- a/package/rpm/aikido.spec +++ b/package/rpm/aikido.spec @@ -50,12 +50,25 @@ for php_path in /usr/bin/php* /usr/local/bin/php*; do fi done -if [ ${#PHP_VERSIONS[@]} -eq 0 ]; then - echo "No PHP versions found! Exiting!" - exit 1 +if [ ${#PHP_VERSIONS[@]} -gt 0 ]; then + echo "Found PHP versions: ${PHP_VERSIONS[*]}" fi -echo "Found PHP versions: ${PHP_VERSIONS[*]}" + + +# Check if FrankenPHP is installed +FRANKENPHP_PHP_VERSION="" +if command -v frankenphp -v >/dev/null 2>&1; then + if frankenphp -v >/dev/null 2>&1; then + FRANKENPHP_PHP_VERSION=$(frankenphp -v 2>/dev/null | grep -oP 'PHP \K\d+\.\d+' | head -n 1) + fi + + if [ -n "$FRANKENPHP_PHP_VERSION" ]; then + echo "Found FrankenPHP with embedded PHP $FRANKENPHP_PHP_VERSION" + else + echo "Found FrankenPHP but could not determine PHP version" + fi +fi for PHP_VERSION in "${PHP_VERSIONS[@]}"; do echo "Installing for PHP $PHP_VERSION..." @@ -117,6 +130,39 @@ for PHP_VERSION in "${PHP_VERSIONS[@]}"; do fi done +# Install for FrankenPHP if installed +if [ -n "$FRANKENPHP_PHP_VERSION" ]; then + echo "Installing for FrankenPHP with PHP $FRANKENPHP_PHP_VERSION..." + + FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" + FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" + + # Install Aikido PHP extension for FrankenPHP + if [ -d "$FRANKENPHP_EXT_DIR" ]; then + echo "Installing new Aikido extension in $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." + ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so + else + echo "FrankenPHP extension directory $FRANKENPHP_EXT_DIR not found! Creating it..." + mkdir -p $FRANKENPHP_EXT_DIR + ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so + fi + + # Install Aikido ini file for FrankenPHP + if [ -d "$FRANKENPHP_INI_DIR" ]; then + echo "Installing new Aikido mod in $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." + ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini + else + echo "FrankenPHP ini directory $FRANKENPHP_INI_DIR not found! Creating it..." + mkdir -p $FRANKENPHP_INI_DIR + ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini + fi +fi + +if [ ${#PHP_VERSIONS[@]} -eq 0 ] && [ -z "$FRANKENPHP_PHP_VERSION" ]; then + echo "No PHP or FrankenPHP found! Exiting!" + exit 1 +fi + mkdir -p /run/aikido-%{version} chmod 777 /run/aikido-%{version} @@ -153,6 +199,14 @@ done echo "Found PHP versions: ${PHP_VERSIONS[*]}" +# Check if FrankenPHP directories exist for uninstall +FRANKENPHP_INSTALLED=false +FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" +FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" +if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then + FRANKENPHP_INSTALLED=true +fi + for PHP_VERSION in "${PHP_VERSIONS[@]}"; do echo "Uninstalling for PHP $PHP_VERSION..." 
@@ -207,6 +261,23 @@ for PHP_VERSION in "${PHP_VERSIONS[@]}"; do fi done +# Uninstall for FrankenPHP if directories exist +FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" +FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" +if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then + echo "Uninstalling for FrankenPHP..." + + if [ -f "$FRANKENPHP_EXT_DIR/aikido-%{version}.so" ]; then + echo "Uninstalling Aikido extension from $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." + rm -f $FRANKENPHP_EXT_DIR/aikido-%{version}.so + fi + + if [ -f "$FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini" ]; then + echo "Uninstalling Aikido mod from $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." + rm -f $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini + fi +fi + # Remove the Aikido logs folder rm -rf /var/log/aikido-%{version} diff --git a/tools/server_tests/php_built_in/main.py b/tools/server_tests/php_built_in/main.py index af76a929a..abc079463 100644 --- a/tools/server_tests/php_built_in/main.py +++ b/tools/server_tests/php_built_in/main.py @@ -10,5 +10,5 @@ def php_built_in_start_server(test_data, test_lib_dir, valgrind): return subprocess.Popen( php_server_process_cmd, - env=test_data["env"] + env=dict(os.environ, **test_data["env"]) ) From cc173d4a0e69656a9183716a7e83d5779e9e8d78 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 7 Nov 2025 17:52:49 +0000 Subject: [PATCH 002/170] Refactor global variable access to use local references for improved readability and performance in multiple files --- lib/php-extension/Environment.cpp | 19 ++++--- lib/php-extension/GoWrappers.cpp | 56 ++++++++++--------- lib/php-extension/Handle.cpp | 21 ++++--- lib/php-extension/HandleFileCompilation.cpp | 19 ++++--- lib/php-extension/HandlePathAccess.cpp | 12 ++-- lib/php-extension/HandleQueries.cpp | 28 ++++++---- lib/php-extension/HandleRateLimitGroup.cpp | 5 +- .../HandleShouldBlockRequest.cpp | 19 ++++--- lib/php-extension/HandleUrls.cpp | 42 ++++++++------ lib/php-extension/HandleUsers.cpp | 11 ++-- lib/php-extension/HookAst.cpp | 47 +++++++++------- lib/php-extension/Log.cpp | 2 +- lib/php-extension/RequestProcessor.cpp | 20 ++++--- lib/php-extension/include/Includes.h | 1 + 14 files changed, 173 insertions(+), 129 deletions(-) diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 7e2dc7ea6..187b2884d 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -97,13 +97,14 @@ bool LoadLaravelEnvFile() { } std::string GetLaravelEnvVariable(const std::string& env_key) { - if (AIKIDO_GLOBAL(laravelEnv).find(env_key) != AIKIDO_GLOBAL(laravelEnv).end()) { + const auto& laravelEnv = AIKIDO_GLOBAL(laravelEnv); + if (laravelEnv.find(env_key) != laravelEnv.end()) { if (env_key == "AIKIDO_TOKEN") { - AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), AnonymizeToken(AIKIDO_GLOBAL(laravelEnv)[env_key]).c_str()); + AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), AnonymizeToken(laravelEnv.at(env_key)).c_str()); } else { - AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), AIKIDO_GLOBAL(laravelEnv)[env_key].c_str()); + AIKIDO_LOG_DEBUG("laravel_env[%s] = %s\n", env_key.c_str(), laravelEnv.at(env_key).c_str()); } - return AIKIDO_GLOBAL(laravelEnv)[env_key]; + return laravelEnv.at(env_key); } return ""; } @@ -166,12 +167,14 @@ unsigned int GetEnvNumber(const std::string& env_key, unsigned int default_value } void LoadEnvironment() { + auto& logLevelStr = AIKIDO_GLOBAL(log_level_str); + auto& logLevel = AIKIDO_GLOBAL(log_level); if 
(GetEnvBool("AIKIDO_DEBUG", false)) { - AIKIDO_GLOBAL(log_level_str) = "DEBUG"; - AIKIDO_GLOBAL(log_level) = AIKIDO_LOG_LEVEL_DEBUG; + logLevelStr = "DEBUG"; + logLevel = AIKIDO_LOG_LEVEL_DEBUG; } else { - AIKIDO_GLOBAL(log_level_str) = GetEnvString("AIKIDO_LOG_LEVEL", "WARN"); - AIKIDO_GLOBAL(log_level) = Log::ToLevel(AIKIDO_GLOBAL(log_level_str)); + logLevelStr = GetEnvString("AIKIDO_LOG_LEVEL", "WARN"); + logLevel = Log::ToLevel(logLevelStr); } AIKIDO_GLOBAL(blocking) = GetEnvBool("AIKIDO_BLOCK", false) || GetEnvBool("AIKIDO_BLOCKING", false);; diff --git a/lib/php-extension/GoWrappers.cpp b/lib/php-extension/GoWrappers.cpp index 71d0a5dfc..a55d7c947 100644 --- a/lib/php-extension/GoWrappers.cpp +++ b/lib/php-extension/GoWrappers.cpp @@ -16,83 +16,87 @@ char* GoContextCallback(int callbackId) { std::string ctx; std::string ret; + auto& server = AIKIDO_GLOBAL(server); + const auto& requestCache = AIKIDO_GLOBAL(requestCache); + const auto& eventCache = AIKIDO_GLOBAL(eventCache); + try { switch (callbackId) { case CONTEXT_REMOTE_ADDRESS: ctx = "REMOTE_ADDRESS"; - ret = AIKIDO_GLOBAL(server).GetVar("REMOTE_ADDR"); + ret = server.GetVar("REMOTE_ADDR"); break; case CONTEXT_METHOD: ctx = "METHOD"; - ret = AIKIDO_GLOBAL(server).GetVar("REQUEST_METHOD"); + ret = server.GetVar("REQUEST_METHOD"); break; case CONTEXT_ROUTE: ctx = "ROUTE"; - ret = AIKIDO_GLOBAL(server).GetRoute(); + ret = server.GetRoute(); break; case CONTEXT_STATUS_CODE: ctx = "STATUS_CODE"; - ret = AIKIDO_GLOBAL(server).GetStatusCode(); + ret = server.GetStatusCode(); break; case CONTEXT_BODY: ctx = "BODY"; - ret = AIKIDO_GLOBAL(server).GetBody(); + ret = server.GetBody(); break; case CONTEXT_HEADER_X_FORWARDED_FOR: ctx = "HEADER_X_FORWARDED_FOR"; - ret = AIKIDO_GLOBAL(server).GetVar("HTTP_X_FORWARDED_FOR"); + ret = server.GetVar("HTTP_X_FORWARDED_FOR"); break; case CONTEXT_COOKIES: ctx = "COOKIES"; - ret = AIKIDO_GLOBAL(server).GetVar("HTTP_COOKIE"); + ret = server.GetVar("HTTP_COOKIE"); break; case CONTEXT_QUERY: ctx = "QUERY"; - ret = AIKIDO_GLOBAL(server).GetQuery(); + ret = server.GetQuery(); break; case CONTEXT_HTTPS: ctx = "HTTPS"; - ret = AIKIDO_GLOBAL(server).GetVar("HTTPS"); + ret = server.GetVar("HTTPS"); break; case CONTEXT_URL: ctx = "URL"; - ret = AIKIDO_GLOBAL(server).GetUrl(); + ret = server.GetUrl(); break; case CONTEXT_HEADERS: ctx = "HEADERS"; - ret = AIKIDO_GLOBAL(server).GetHeaders(); + ret = server.GetHeaders(); break; case CONTEXT_HEADER_USER_AGENT: ctx = "USER_AGENT"; - ret = AIKIDO_GLOBAL(server).GetVar("HTTP_USER_AGENT"); + ret = server.GetVar("HTTP_USER_AGENT"); break; case CONTEXT_USER_ID: ctx = "USER_ID"; - ret = AIKIDO_GLOBAL(requestCache).userId; + ret = requestCache.userId; break; case CONTEXT_USER_NAME: ctx = "USER_NAME"; - ret = AIKIDO_GLOBAL(requestCache).userName; + ret = requestCache.userName; break; case CONTEXT_RATE_LIMIT_GROUP: ctx = "RATE_LIMIT_GROUP"; - ret = AIKIDO_GLOBAL(requestCache).rateLimitGroup; + ret = requestCache.rateLimitGroup; break; case FUNCTION_NAME: ctx = "FUNCTION_NAME"; - ret = AIKIDO_GLOBAL(eventCache).functionName; + ret = eventCache.functionName; break; case OUTGOING_REQUEST_URL: ctx = "OUTGOING_REQUEST_URL"; - ret = AIKIDO_GLOBAL(eventCache).outgoingRequestUrl; + ret = eventCache.outgoingRequestUrl; break; case OUTGOING_REQUEST_EFFECTIVE_URL: ctx = "OUTGOING_REQUEST_EFFECTIVE_URL"; - ret = AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl; + ret = eventCache.outgoingRequestEffectiveUrl; break; case OUTGOING_REQUEST_PORT: ctx = "OUTGOING_REQUEST_PORT"; - ret = 
AIKIDO_GLOBAL(eventCache).outgoingRequestPort; + ret = eventCache.outgoingRequestPort; break; case OUTGOING_REQUEST_EFFECTIVE_URL_PORT: ctx = "OUTGOING_REQUEST_EFFECTIVE_URL_PORT"; @@ -100,31 +104,31 @@ char* GoContextCallback(int callbackId) { break; case OUTGOING_REQUEST_RESOLVED_IP: ctx = "OUTGOING_REQUEST_RESOLVED_IP"; - ret = AIKIDO_GLOBAL(eventCache).outgoingRequestResolvedIp; + ret = eventCache.outgoingRequestResolvedIp; break; case CMD: ctx = "CMD"; - ret = AIKIDO_GLOBAL(eventCache).cmd; + ret = eventCache.cmd; break; case FILENAME: ctx = "FILENAME"; - ret = AIKIDO_GLOBAL(eventCache).filename; + ret = eventCache.filename; break; case FILENAME2: ctx = "FILENAME2"; - ret = AIKIDO_GLOBAL(eventCache).filename2; + ret = eventCache.filename2; break; case SQL_QUERY: ctx = "SQL_QUERY"; - ret = AIKIDO_GLOBAL(eventCache).sqlQuery; + ret = eventCache.sqlQuery; break; case SQL_DIALECT: ctx = "SQL_DIALECT"; - ret = AIKIDO_GLOBAL(eventCache).sqlDialect; + ret = eventCache.sqlDialect; break; case MODULE: ctx = "MODULE"; - ret = AIKIDO_GLOBAL(eventCache).moduleName; + ret = eventCache.moduleName; break; case STACK_TRACE: ctx = "STACK_TRACE"; diff --git a/lib/php-extension/Handle.cpp b/lib/php-extension/Handle.cpp index e91783f3d..2c5c66ea2 100644 --- a/lib/php-extension/Handle.cpp +++ b/lib/php-extension/Handle.cpp @@ -6,20 +6,24 @@ ACTION_STATUS aikido_process_event(EVENT_ID& eventId, std::string& sink) { return CONTINUE; } + auto& requestProcessor = AIKIDO_GLOBAL(requestProcessor); + auto& action = AIKIDO_GLOBAL(action); + auto& statsMap = AIKIDO_GLOBAL(stats); + std::string outputEvent; - AIKIDO_GLOBAL(requestProcessor).SendEvent(eventId, outputEvent); + requestProcessor.SendEvent(eventId, outputEvent); - if (AIKIDO_GLOBAL(action).IsDetection(outputEvent)) { - AIKIDO_GLOBAL(stats)[sink].IncrementAttacksDetected(); + if (action.IsDetection(outputEvent)) { + statsMap[sink].IncrementAttacksDetected(); } - if (!AIKIDO_GLOBAL(requestProcessor).IsBlockingEnabled()) { + if (!requestProcessor.IsBlockingEnabled()) { return CONTINUE; } - ACTION_STATUS action_status = AIKIDO_GLOBAL(action).Execute(outputEvent); + ACTION_STATUS action_status = action.Execute(outputEvent); if (action_status == BLOCK) { - AIKIDO_GLOBAL(stats)[sink].IncrementAttacksBlocked(); + statsMap[sink].IncrementAttacksBlocked(); } return action_status; } @@ -36,8 +40,9 @@ ZEND_NAMED_FUNCTION(aikido_generic_handler) { std::string outputEvent; bool caughtException = false; - AIKIDO_GLOBAL(eventCache).Reset(); - AIKIDO_GLOBAL(eventCache).functionName = ZSTR_VAL(execute_data->func->common.function_name); + auto& eventCache = AIKIDO_GLOBAL(eventCache); + eventCache.Reset(); + eventCache.functionName = ZSTR_VAL(execute_data->func->common.function_name); try { zend_execute_data* exec_data = EG(current_execute_data); diff --git a/lib/php-extension/HandleFileCompilation.cpp b/lib/php-extension/HandleFileCompilation.cpp index a47d15c84..e37e9005b 100644 --- a/lib/php-extension/HandleFileCompilation.cpp +++ b/lib/php-extension/HandleFileCompilation.cpp @@ -1,34 +1,35 @@ #include "Includes.h" zend_op_array* handle_file_compilation(zend_file_handle* file_handle, int type) { - AIKIDO_GLOBAL(eventCache).Reset(); + auto& eventCache = AIKIDO_GLOBAL(eventCache); + eventCache.Reset(); switch (type) { case ZEND_INCLUDE: - AIKIDO_GLOBAL(eventCache).functionName = "include"; + eventCache.functionName = "include"; break; case ZEND_INCLUDE_ONCE: - AIKIDO_GLOBAL(eventCache).functionName = "include_once"; + eventCache.functionName = "include_once"; break; case 
ZEND_REQUIRE: - AIKIDO_GLOBAL(eventCache).functionName = "require"; + eventCache.functionName = "require"; break; case ZEND_REQUIRE_ONCE: - AIKIDO_GLOBAL(eventCache).functionName = "require_once"; + eventCache.functionName = "require_once"; break; default: return original_file_compilation_handler(file_handle, type); } - ScopedTimer scopedTimer(AIKIDO_GLOBAL(eventCache).functionName, "fs_op"); + ScopedTimer scopedTimer(eventCache.functionName, "fs_op"); char* filename = PHP_GET_CHAR_PTR(file_handle->filename); - AIKIDO_LOG_DEBUG("\"%s\" called for \"%s\"!\n", AIKIDO_GLOBAL(eventCache).functionName.c_str(), filename); + AIKIDO_LOG_DEBUG("\"%s\" called for \"%s\"!\n", eventCache.functionName.c_str(), filename); EVENT_ID eventId = NO_EVENT_ID; helper_handle_pre_file_path_access(filename, eventId); - if (aikido_process_event(eventId, AIKIDO_GLOBAL(eventCache).functionName) == BLOCK) { + if (aikido_process_event(eventId, eventCache.functionName) == BLOCK) { // exit zend_compile_file handler and do not call the original handler, thus blocking the script file compilation return nullptr; } @@ -39,7 +40,7 @@ zend_op_array* handle_file_compilation(zend_file_handle* file_handle, int type) eventId = NO_EVENT_ID; helper_handle_post_file_path_access(eventId); - aikido_process_event(eventId, AIKIDO_GLOBAL(eventCache).functionName); + aikido_process_event(eventId, eventCache.functionName); return op_array; } diff --git a/lib/php-extension/HandlePathAccess.cpp b/lib/php-extension/HandlePathAccess.cpp index f84ee28fa..3e0c4e207 100644 --- a/lib/php-extension/HandlePathAccess.cpp +++ b/lib/php-extension/HandlePathAccess.cpp @@ -27,26 +27,28 @@ void helper_handle_pre_file_path_access(char *filename, EVENT_ID &eventId) { filenameString = get_resource_or_original_from_php_filter(filenameString); // if filename starts with http:// or https://, it's a URL so we treat it as an outgoing request + auto& eventCache = AIKIDO_GLOBAL(eventCache); if (StartsWith(filenameString, "http://", false) || StartsWith(filenameString, "https://", false)) { eventId = EVENT_PRE_OUTGOING_REQUEST; - AIKIDO_GLOBAL(eventCache).outgoingRequestUrl = filenameString; + eventCache.outgoingRequestUrl = filenameString; } else { eventId = EVENT_PRE_PATH_ACCESSED; - AIKIDO_GLOBAL(eventCache).filename = filenameString; + eventCache.filename = filenameString; } } /* Helper for handle post file path access */ void helper_handle_post_file_path_access(EVENT_ID &eventId) { - if (!AIKIDO_GLOBAL(eventCache).outgoingRequestUrl.empty()) { + auto& eventCache = AIKIDO_GLOBAL(eventCache); + if (!eventCache.outgoingRequestUrl.empty()) { // If the pre handler for path access determined this was actually an URL, // we need to notify that the request finished. eventId = EVENT_POST_OUTGOING_REQUEST; // As we cannot extract the effective URL for these fopen wrappers, - // we will just assume it's the same as the initial URL. - AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl = AIKIDO_GLOBAL(eventCache).outgoingRequestUrl; + // we will assume it's the same as the initial URL. 
+ eventCache.outgoingRequestEffectiveUrl = eventCache.outgoingRequestUrl; } } diff --git a/lib/php-extension/HandleQueries.cpp b/lib/php-extension/HandleQueries.cpp index d3fb9b1c8..c9000cfbe 100644 --- a/lib/php-extension/HandleQueries.cpp +++ b/lib/php-extension/HandleQueries.cpp @@ -24,9 +24,10 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_pdo_query) { } eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - AIKIDO_GLOBAL(eventCache).moduleName = "PDO"; - AIKIDO_GLOBAL(eventCache).sqlQuery = ZSTR_VAL(query); - AIKIDO_GLOBAL(eventCache).sqlDialect = GetSqlDialectFromPdo(pdo_object); + auto& eventCache = AIKIDO_GLOBAL(eventCache); + eventCache.moduleName = "PDO"; + eventCache.sqlQuery = ZSTR_VAL(query); + eventCache.sqlDialect = GetSqlDialectFromPdo(pdo_object); } AIKIDO_HANDLER_FUNCTION(handle_pre_pdo_exec) { @@ -47,9 +48,10 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_pdo_exec) { } eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - AIKIDO_GLOBAL(eventCache).moduleName = "PDO"; - AIKIDO_GLOBAL(eventCache).sqlQuery = ZSTR_VAL(query); - AIKIDO_GLOBAL(eventCache).sqlDialect = GetSqlDialectFromPdo(pdo_object); + auto& eventCache = AIKIDO_GLOBAL(eventCache); + eventCache.moduleName = "PDO"; + eventCache.sqlQuery = ZSTR_VAL(query); + eventCache.sqlDialect = GetSqlDialectFromPdo(pdo_object); } AIKIDO_HANDLER_FUNCTION(handle_pre_pdostatement_execute) { @@ -66,11 +68,12 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_pdostatement_execute) { } eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - AIKIDO_GLOBAL(eventCache).moduleName = "PDOStatement"; - AIKIDO_GLOBAL(eventCache).sqlQuery = PHP_GET_CHAR_PTR(stmt->query_string); + auto& eventCache = AIKIDO_GLOBAL(eventCache); + eventCache.moduleName = "PDOStatement"; + eventCache.sqlQuery = PHP_GET_CHAR_PTR(stmt->query_string); zval *pdo_object = &stmt->database_object_handle; - AIKIDO_GLOBAL(eventCache).sqlDialect = GetSqlDialectFromPdo(pdo_object); + eventCache.sqlDialect = GetSqlDialectFromPdo(pdo_object); } zend_class_entry* helper_load_mysqli_link_class_entry() { @@ -109,7 +112,8 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_mysqli_query){ scopedTimer.SetSink(sink, "sql_op"); eventId = EVENT_PRE_SQL_QUERY_EXECUTED; - AIKIDO_GLOBAL(eventCache).moduleName = "mysqli"; - AIKIDO_GLOBAL(eventCache).sqlQuery = query; - AIKIDO_GLOBAL(eventCache).sqlDialect = "mysql"; + auto& eventCache = AIKIDO_GLOBAL(eventCache); + eventCache.moduleName = "mysqli"; + eventCache.sqlQuery = query; + eventCache.sqlDialect = "mysql"; } diff --git a/lib/php-extension/HandleRateLimitGroup.cpp b/lib/php-extension/HandleRateLimitGroup.cpp index c57661aa5..5fc981af5 100644 --- a/lib/php-extension/HandleRateLimitGroup.cpp +++ b/lib/php-extension/HandleRateLimitGroup.cpp @@ -17,11 +17,12 @@ ZEND_FUNCTION(set_rate_limit_group) { RETURN_BOOL(false); } - AIKIDO_GLOBAL(requestCache).rateLimitGroup = std::string(group, groupLength); + auto& requestCache = AIKIDO_GLOBAL(requestCache); + requestCache.rateLimitGroup = std::string(group, groupLength); std::string outputEvent; AIKIDO_GLOBAL(requestProcessor).SendEvent(EVENT_SET_RATE_LIMIT_GROUP, outputEvent); - AIKIDO_LOG_DEBUG("Set rate limit group to %s\n", AIKIDO_GLOBAL(requestCache).rateLimitGroup.c_str()); + AIKIDO_LOG_DEBUG("Set rate limit group to %s\n", requestCache.rateLimitGroup.c_str()); RETURN_BOOL(true); } \ No newline at end of file diff --git a/lib/php-extension/HandleShouldBlockRequest.cpp b/lib/php-extension/HandleShouldBlockRequest.cpp index 200dec9c9..547b5c61f 100644 --- a/lib/php-extension/HandleShouldBlockRequest.cpp +++ b/lib/php-extension/HandleShouldBlockRequest.cpp @@ -18,9 
+18,11 @@ bool CheckBlocking(EVENT_ID eventId, bool& checkedBlocking) { ScopedTimer scopedTimer("check_blocking", "aikido_op"); try { + auto& requestProcessor = AIKIDO_GLOBAL(requestProcessor); + auto& action = AIKIDO_GLOBAL(action); std::string output; - AIKIDO_GLOBAL(requestProcessor).SendEvent(eventId, output); - AIKIDO_GLOBAL(action).Execute(output); + requestProcessor.SendEvent(eventId, output); + action.Execute(output); checkedBlocking = true; return true; } catch (const std::exception &e) { @@ -56,12 +58,13 @@ ZEND_FUNCTION(should_block_request) { #else zval *obj = return_value; #endif - zend_update_property_bool(blockingStatusClass, obj, "block", sizeof("block") - 1, AIKIDO_GLOBAL(action).Block()); - zend_update_property_string(blockingStatusClass, obj, "type", sizeof("type") - 1, AIKIDO_GLOBAL(action).Type()); - zend_update_property_string(blockingStatusClass, obj, "trigger", sizeof("trigger") - 1, AIKIDO_GLOBAL(action).Trigger()); - zend_update_property_string(blockingStatusClass, obj, "description", sizeof("description") - 1, AIKIDO_GLOBAL(action).Description()); - zend_update_property_string(blockingStatusClass, obj, "ip", sizeof("ip") - 1, AIKIDO_GLOBAL(action).Ip()); - zend_update_property_string(blockingStatusClass, obj, "user_agent", sizeof("user_agent") - 1, AIKIDO_GLOBAL(action).UserAgent()); + auto& action = AIKIDO_GLOBAL(action); + zend_update_property_bool(blockingStatusClass, obj, "block", sizeof("block") - 1, action.Block()); + zend_update_property_string(blockingStatusClass, obj, "type", sizeof("type") - 1, action.Type()); + zend_update_property_string(blockingStatusClass, obj, "trigger", sizeof("trigger") - 1, action.Trigger()); + zend_update_property_string(blockingStatusClass, obj, "description", sizeof("description") - 1, action.Description()); + zend_update_property_string(blockingStatusClass, obj, "ip", sizeof("ip") - 1, action.Ip()); + zend_update_property_string(blockingStatusClass, obj, "user_agent", sizeof("user_agent") - 1, action.UserAgent()); } ZEND_FUNCTION(auto_block_request) { diff --git a/lib/php-extension/HandleUrls.cpp b/lib/php-extension/HandleUrls.cpp index 53bf2b88a..8b3969062 100644 --- a/lib/php-extension/HandleUrls.cpp +++ b/lib/php-extension/HandleUrls.cpp @@ -13,31 +13,34 @@ AIKIDO_HANDLER_FUNCTION(handle_pre_curl_exec) { #endif ZEND_PARSE_PARAMETERS_END(); - AIKIDO_GLOBAL(eventCache).outgoingRequestUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); - AIKIDO_GLOBAL(eventCache).outgoingRequestPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); + auto& eventCache = AIKIDO_GLOBAL(eventCache); + auto& requestCache = AIKIDO_GLOBAL(requestCache); + + eventCache.outgoingRequestUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); + eventCache.outgoingRequestPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); // if requestCache.outgoingRequestUrl is not empty, we check if it's a redirect - if (!AIKIDO_GLOBAL(requestCache).outgoingRequestUrl.empty()) { - json outgoingRequestUrlJson = CallPhpFunctionParseUrl(AIKIDO_GLOBAL(eventCache).outgoingRequestUrl); - json outgoingRequestRedirectUrlJson = CallPhpFunctionParseUrl(AIKIDO_GLOBAL(requestCache).outgoingRequestRedirectUrl); + if (!requestCache.outgoingRequestUrl.empty()) { + json outgoingRequestUrlJson = CallPhpFunctionParseUrl(eventCache.outgoingRequestUrl); + json outgoingRequestRedirectUrlJson = CallPhpFunctionParseUrl(requestCache.outgoingRequestRedirectUrl); // if the host and port are the same, we use the initial URL, 
otherwise we use the effective URL if (!outgoingRequestUrlJson.empty() && !outgoingRequestRedirectUrlJson.empty() && outgoingRequestUrlJson["host"] == outgoingRequestRedirectUrlJson["host"] && outgoingRequestUrlJson["port"] == outgoingRequestRedirectUrlJson["port"]) { - AIKIDO_GLOBAL(eventCache).outgoingRequestUrl = AIKIDO_GLOBAL(requestCache).outgoingRequestUrl; + eventCache.outgoingRequestUrl = requestCache.outgoingRequestUrl; } else { // if previous outgoingRequestRedirectUrl it's different from outgoingRequestUrl it means that it's a new request // so we reset the outgoingRequestUrl - AIKIDO_GLOBAL(requestCache).outgoingRequestUrl = ""; + requestCache.outgoingRequestUrl = ""; } } - if (AIKIDO_GLOBAL(eventCache).outgoingRequestUrl.empty()) return; + if (eventCache.outgoingRequestUrl.empty()) return; eventId = EVENT_PRE_OUTGOING_REQUEST; - AIKIDO_GLOBAL(eventCache).moduleName = "curl"; + eventCache.moduleName = "curl"; } AIKIDO_HANDLER_FUNCTION(handle_post_curl_exec) { @@ -56,24 +59,27 @@ AIKIDO_HANDLER_FUNCTION(handle_post_curl_exec) { eventId = EVENT_POST_OUTGOING_REQUEST; - AIKIDO_GLOBAL(eventCache).moduleName = "curl"; - AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); - AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrlPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); - AIKIDO_GLOBAL(eventCache).outgoingRequestResolvedIp = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_IP); + auto& eventCache = AIKIDO_GLOBAL(eventCache); + auto& requestCache = AIKIDO_GLOBAL(requestCache); + + eventCache.moduleName = "curl"; + eventCache.outgoingRequestEffectiveUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_EFFECTIVE_URL); + eventCache.outgoingRequestEffectiveUrlPort = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_PORT); + eventCache.outgoingRequestResolvedIp = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_PRIMARY_IP); std::string outgoingRequestResponseCode = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_RESPONSE_CODE); // if outgoingRequestResponseCode starts with 3, it's a redirect if (!outgoingRequestResponseCode.empty() && outgoingRequestResponseCode.substr(0, 1) == "3") { - AIKIDO_GLOBAL(requestCache).outgoingRequestRedirectUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_REDIRECT_URL); + requestCache.outgoingRequestRedirectUrl = CallPhpFunctionCurlGetInfo(curlHandle, CURLINFO_REDIRECT_URL); // if it's the first redirect - if (AIKIDO_GLOBAL(requestCache).outgoingRequestUrl.empty()) { - AIKIDO_GLOBAL(requestCache).outgoingRequestUrl = AIKIDO_GLOBAL(eventCache).outgoingRequestEffectiveUrl; + if (requestCache.outgoingRequestUrl.empty()) { + requestCache.outgoingRequestUrl = eventCache.outgoingRequestEffectiveUrl; } } else { - AIKIDO_GLOBAL(requestCache).outgoingRequestUrl = ""; - AIKIDO_GLOBAL(requestCache).outgoingRequestRedirectUrl = ""; + requestCache.outgoingRequestUrl = ""; + requestCache.outgoingRequestRedirectUrl = ""; } } diff --git a/lib/php-extension/HandleUsers.cpp b/lib/php-extension/HandleUsers.cpp index 28b7e4dc4..a34b7d0cb 100644 --- a/lib/php-extension/HandleUsers.cpp +++ b/lib/php-extension/HandleUsers.cpp @@ -1,13 +1,16 @@ #include "Includes.h" bool SendUserEvent(std::string id, std::string username) { - AIKIDO_GLOBAL(requestCache).userId = id; - AIKIDO_GLOBAL(requestCache).userName = username; + auto& requestCache = AIKIDO_GLOBAL(requestCache); + requestCache.userId = id; + requestCache.userName = username; try { + auto& requestProcessor = 
AIKIDO_GLOBAL(requestProcessor); + auto& action = AIKIDO_GLOBAL(action); std::string output; - AIKIDO_GLOBAL(requestProcessor).SendEvent(EVENT_SET_USER, output); - AIKIDO_GLOBAL(action).Execute(output); + requestProcessor.SendEvent(EVENT_SET_USER, output); + action.Execute(output); return true; } catch (const std::exception &e) { AIKIDO_LOG_ERROR("Exception encountered in processing user event: %s\n", e.what()); diff --git a/lib/php-extension/HookAst.cpp b/lib/php-extension/HookAst.cpp index 19f14d143..ecad17289 100644 --- a/lib/php-extension/HookAst.cpp +++ b/lib/php-extension/HookAst.cpp @@ -10,15 +10,17 @@ void ast_to_clean_dtor(zval *zv) { } void ensure_ast_hashtable_initialized() { - if (!AIKIDO_GLOBAL(global_ast_to_clean)) { - ALLOC_HASHTABLE(AIKIDO_GLOBAL(global_ast_to_clean)); - zend_hash_init(AIKIDO_GLOBAL(global_ast_to_clean), 8, NULL, ast_to_clean_dtor, 1); + auto& globalAstToClean = AIKIDO_GLOBAL(global_ast_to_clean); + if (!globalAstToClean) { + ALLOC_HASHTABLE(globalAstToClean); + zend_hash_init(globalAstToClean, 8, NULL, ast_to_clean_dtor, 1); } } zend_ast *create_ast_call(const char *name) { ensure_ast_hashtable_initialized(); + auto& globalAstToClean = AIKIDO_GLOBAL(global_ast_to_clean); zend_ast *call; zend_ast_zval *name_var; zend_ast_list *arg_list; @@ -28,14 +30,14 @@ zend_ast *create_ast_call(const char *name) { name_var->kind = ZEND_AST_ZVAL; ZVAL_STRING(&name_var->val, name); name_var->val.u2.lineno = 0; - zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), name_var); + zend_hash_next_index_insert_ptr(globalAstToClean, name_var); // Create empty argument list arg_list = (zend_ast_list*)emalloc(sizeof(zend_ast_list)); arg_list->kind = ZEND_AST_ARG_LIST; arg_list->lineno = 0; arg_list->children = 0; - zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), arg_list); + zend_hash_next_index_insert_ptr(globalAstToClean, arg_list); // Create function call node call = (zend_ast*)emalloc(sizeof(zend_ast) + sizeof(zend_ast*)); @@ -43,7 +45,7 @@ zend_ast *create_ast_call(const char *name) { call->lineno = 0; call->child[0] = (zend_ast*)name_var; call->child[1] = (zend_ast*)arg_list; - zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), call); + zend_hash_next_index_insert_ptr(globalAstToClean, call); return call; } @@ -105,7 +107,8 @@ void insert_call_to_ast(zend_ast *ast) { block->children = 2; block->child[0] = call; block->child[1] = stmt_list->child[insertion_point]; - zend_hash_next_index_insert_ptr(AIKIDO_GLOBAL(global_ast_to_clean), block); + auto& globalAstToClean = AIKIDO_GLOBAL(global_ast_to_clean); + zend_hash_next_index_insert_ptr(globalAstToClean, block); stmt_list->child[insertion_point] = (zend_ast*)block; } @@ -113,39 +116,43 @@ void insert_call_to_ast(zend_ast *ast) { void aikido_ast_process(zend_ast *ast) { insert_call_to_ast(ast); - if(AIKIDO_GLOBAL(original_ast_process)){ - AIKIDO_GLOBAL(original_ast_process)(ast); + auto& originalAstProcess = AIKIDO_GLOBAL(original_ast_process); + if(originalAstProcess){ + originalAstProcess(ast); } } void HookAstProcess() { - if (AIKIDO_GLOBAL(original_ast_process)) { - AIKIDO_LOG_WARN("\"zend_ast_process\" already hooked (original handler %p)!\n", AIKIDO_GLOBAL(original_ast_process)); + auto& originalAstProcess = AIKIDO_GLOBAL(original_ast_process); + if (originalAstProcess) { + AIKIDO_LOG_WARN("\"zend_ast_process\" already hooked (original handler %p)!\n", originalAstProcess); return; } - AIKIDO_GLOBAL(original_ast_process) = zend_ast_process; + originalAstProcess = 
zend_ast_process; zend_ast_process = aikido_ast_process; - AIKIDO_LOG_INFO("Hooked \"zend_ast_process\" (original handler %p)!\n", AIKIDO_GLOBAL(original_ast_process)); + AIKIDO_LOG_INFO("Hooked \"zend_ast_process\" (original handler %p)!\n", originalAstProcess); } void UnhookAstProcess() { - AIKIDO_LOG_INFO("Unhooked \"zend_ast_process\" (original handler %p)!\n", AIKIDO_GLOBAL(original_ast_process)); + auto& originalAstProcess = AIKIDO_GLOBAL(original_ast_process); + AIKIDO_LOG_INFO("Unhooked \"zend_ast_process\" (original handler %p)!\n", originalAstProcess); // As it's not mandatory to have a zend_ast_process installed, we need to ensure UnhookAstProcess() restores zend_ast_process even if the original was NULL // Only unhook if the current handler is still ours, avoiding clobbering others if (zend_ast_process == aikido_ast_process){ - zend_ast_process = AIKIDO_GLOBAL(original_ast_process); + zend_ast_process = originalAstProcess; } - AIKIDO_GLOBAL(original_ast_process) = nullptr; + originalAstProcess = nullptr; } void DestroyAstToClean() { - if (AIKIDO_GLOBAL(global_ast_to_clean)) { - zend_hash_destroy(AIKIDO_GLOBAL(global_ast_to_clean)); - FREE_HASHTABLE(AIKIDO_GLOBAL(global_ast_to_clean)); - AIKIDO_GLOBAL(global_ast_to_clean) = nullptr; + auto& globalAstToClean = AIKIDO_GLOBAL(global_ast_to_clean); + if (globalAstToClean) { + zend_hash_destroy(globalAstToClean); + FREE_HASHTABLE(globalAstToClean); + globalAstToClean = nullptr; } } \ No newline at end of file diff --git a/lib/php-extension/Log.cpp b/lib/php-extension/Log.cpp index 2400bfbf9..f60ac8098 100644 --- a/lib/php-extension/Log.cpp +++ b/lib/php-extension/Log.cpp @@ -37,7 +37,7 @@ void Log::Write(AIKIDO_LOG_LEVEL level, const char* format, ...) { return; } - fprintf(logFile, "[AIKIDO][%s][%d][%ld][%s] ", ToString(level).c_str(), getpid(), (long)GetThreadID(), GetTime().c_str()); + fprintf(logFile, "[AIKIDO][%s][%d][%jd][%s] ", ToString(level).c_str(), getpid(), (intmax_t)GetThreadID(), GetTime().c_str()); va_list args; va_start(args, format); diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp index 9beac12f1..49a66397a 100644 --- a/lib/php-extension/RequestProcessor.cpp +++ b/lib/php-extension/RequestProcessor.cpp @@ -4,12 +4,13 @@ std::string RequestProcessor::GetInitData(const std::string& token) { LoadLaravelEnvFile(); LoadEnvironment(); + auto& globalToken = AIKIDO_GLOBAL(token); if (!token.empty()) { - AIKIDO_GLOBAL(token) = token; + globalToken = token; } json initData = { - {"token", AIKIDO_GLOBAL(token)}, + {"token", globalToken}, {"platform_name", AIKIDO_GLOBAL(sapi_name)}, {"platform_version", PHP_VERSION}, {"endpoint", AIKIDO_GLOBAL(endpoint)}, @@ -86,7 +87,8 @@ bool RequestProcessor::IsBlockingEnabled() { bool RequestProcessor::ReportStats() { AIKIDO_LOG_INFO("Reporting stats to Aikido Request Processor...\n"); - for (std::unordered_map::const_iterator it = AIKIDO_GLOBAL(stats).begin(); it != AIKIDO_GLOBAL(stats).end(); ++it) { + auto& statsMap = AIKIDO_GLOBAL(stats); + for (std::unordered_map::const_iterator it = statsMap.begin(); it != statsMap.end(); ++it) { const std::string& sink = it->first; const SinkStats& sinkStats = it->second; AIKIDO_LOG_INFO("Reporting stats for sink \"%s\" to Aikido Request Processor...\n", sink.c_str()); @@ -101,7 +103,7 @@ bool RequestProcessor::ReportStats() { GoCreateSlice(sinkStats.timings) ); } - AIKIDO_GLOBAL(stats).clear(); + statsMap.clear(); return true; } @@ -168,7 +170,8 @@ bool RequestProcessor::RequestInit() { return false; } - 
if (AIKIDO_GLOBAL(sapi_name) == "apache2handler") { + const auto& sapiName = AIKIDO_GLOBAL(sapi_name); + if (sapiName == "apache2handler") { // Apache-mod-php can serve multiple sites per process // We need to reload config each request to detect token changes this->LoadConfigFromEnvironment(); @@ -177,7 +180,7 @@ bool RequestProcessor::RequestInit() { // can only serve one site per process, so the config should be loaded only once. // After that, subsequent requests cannot change the config so we do not need to reload it. if (this->numberOfRequests == 0) { - AIKIDO_LOG_INFO("Loading Aikido config one time for non-apache-mod-php SAPI: %s...\n", AIKIDO_GLOBAL(sapi_name).c_str()); + AIKIDO_LOG_INFO("Loading Aikido config one time for non-apache-mod-php SAPI: %s...\n", sapiName.c_str()); this->LoadConfigFromEnvironment(); } } @@ -210,9 +213,10 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s } void RequestProcessor::LoadConfigFromEnvironment() { - std::string previousToken = AIKIDO_GLOBAL(token); + auto& globalToken = AIKIDO_GLOBAL(token); + std::string previousToken = globalToken; LoadEnvironment(); - std::string currentToken = AIKIDO_GLOBAL(token); + std::string currentToken = globalToken; LoadConfig(previousToken, currentToken); } diff --git a/lib/php-extension/include/Includes.h b/lib/php-extension/include/Includes.h index 5a10967b7..337ca6bb8 100644 --- a/lib/php-extension/include/Includes.h +++ b/lib/php-extension/include/Includes.h @@ -13,6 +13,7 @@ #include #include #include +#include #include #include From 5e4905e3904332fb16691d59df8018aa799e3a3b Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 7 Nov 2025 17:55:03 +0000 Subject: [PATCH 003/170] Changed zend_hash_init --- lib/php-extension/Aikido.cpp | 5 ----- lib/php-extension/HookAst.cpp | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index e05907803..82729ed37 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -137,11 +137,6 @@ PHP_GINIT_FUNCTION(aikido) { } PHP_GSHUTDOWN_FUNCTION(aikido) { - if (aikido_globals->global_ast_to_clean) { - zend_hash_destroy(aikido_globals->global_ast_to_clean); - FREE_HASHTABLE(aikido_globals->global_ast_to_clean); - aikido_globals->global_ast_to_clean = nullptr; - } aikido_globals->laravelEnv.~unordered_map(); aikido_globals->stats.~unordered_map(); aikido_globals->phpLifecycle.~PhpLifecycle(); diff --git a/lib/php-extension/HookAst.cpp b/lib/php-extension/HookAst.cpp index ecad17289..aeb1f400a 100644 --- a/lib/php-extension/HookAst.cpp +++ b/lib/php-extension/HookAst.cpp @@ -13,7 +13,7 @@ void ensure_ast_hashtable_initialized() { auto& globalAstToClean = AIKIDO_GLOBAL(global_ast_to_clean); if (!globalAstToClean) { ALLOC_HASHTABLE(globalAstToClean); - zend_hash_init(globalAstToClean, 8, NULL, ast_to_clean_dtor, 1); + zend_hash_init(globalAstToClean, 8, NULL, ast_to_clean_dtor, 0); } } From bf126dbe06ccc863ad5b7636c50adaaf0bb7e327 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Mon, 10 Nov 2025 12:55:44 +0000 Subject: [PATCH 004/170] Refactor PHP function calls to ensure proper cleanup of zval variables --- lib/php-extension/Packages.cpp | 6 ++++-- lib/php-extension/PhpWrappers.cpp | 5 +++-- lib/php-extension/Utils.cpp | 12 +++++++----- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/lib/php-extension/Packages.cpp b/lib/php-extension/Packages.cpp index 3fa2e98df..19662ff72 100644 --- a/lib/php-extension/Packages.cpp +++ 
b/lib/php-extension/Packages.cpp @@ -2,11 +2,13 @@ std::string GetPhpPackageVersion(const std::string& packageName) { zval return_value; + std::string result = ""; CallPhpFunctionWithOneParam("phpversion", packageName, &return_value); if (Z_TYPE(return_value) == IS_STRING) { - return Z_STRVAL(return_value); + result = Z_STRVAL(return_value); } - return ""; + zval_ptr_dtor(&return_value); + return result; } unordered_map GetPhpPackages() { diff --git a/lib/php-extension/PhpWrappers.cpp b/lib/php-extension/PhpWrappers.cpp index d516cbbba..7958c2039 100644 --- a/lib/php-extension/PhpWrappers.cpp +++ b/lib/php-extension/PhpWrappers.cpp @@ -36,7 +36,7 @@ bool CallPhpFunction(std::string function_name, unsigned int params_number, zval int _result = call_user_function(EG(function_table), object, &_function_name, _return_value, params_number, params); - zend_string_release(_function_name_str); + zval_dtor(&_function_name); if (!return_value) { zval_ptr_dtor(&_temp_return_value); @@ -60,7 +60,8 @@ bool CallPhpFunctionWithOneParam(std::string function_name, std::string first_pa bool ret = CallPhpFunction(function_name, 1, _params, return_value, object); - zend_string_release(_first_param); + // Clean up the zval properly - this will handle the string refcount + zval_dtor(&_params[0]); return ret; } diff --git a/lib/php-extension/Utils.cpp b/lib/php-extension/Utils.cpp index 2b87cc0b8..268f9ce3c 100644 --- a/lib/php-extension/Utils.cpp +++ b/lib/php-extension/Utils.cpp @@ -104,12 +104,14 @@ std::string GetSqlDialectFromPdo(zval *pdo_object) { } zval retval; + std::string result = "unknown"; if (CallPhpFunctionWithOneParam("getAttribute", PDO_ATTR_DRIVER_NAME, &retval, pdo_object)) { if (Z_TYPE(retval) == IS_STRING) { - return Z_STRVAL(retval); + result = Z_STRVAL(retval); } } - return "unknown"; + zval_ptr_dtor(&retval); + return result; } bool StartsWith(const std::string& str, const std::string& prefix, bool caseSensitive) { @@ -128,9 +130,9 @@ json CallPhpFunctionParseUrl(const std::string& url) { } zval retval; + json result_json; if (CallPhpFunctionWithOneParam("parse_url", url, &retval)) { if (Z_TYPE(retval) == IS_ARRAY) { - json result_json; zval* host = zend_hash_str_find(Z_ARRVAL(retval), "host", sizeof("host") - 1); if (host && Z_TYPE_P(host) == IS_STRING) { result_json["host"] = Z_STRVAL_P(host); @@ -153,10 +155,10 @@ json CallPhpFunctionParseUrl(const std::string& url) { } } } - return result_json; } } - return json(); + zval_ptr_dtor(&retval); + return result_json; } std::string AnonymizeToken(const std::string& str) { From b0d489601a8b34ae7961bccf565232e7f1b0fe9f Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Mon, 10 Nov 2025 15:52:46 +0000 Subject: [PATCH 005/170] ReportStats() accesses the stats map during RequestProcessor destruction, so destroy the stats map after RequestProcessor to prevent use-after-free. 
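
A minimal, self-contained illustration of the hazard (simplified types of my own, not the extension's real classes): the RequestProcessor destructor reports stats, so the stats container it points at must still be alive when that destructor runs.

    // Hypothetical sketch only: shows why ~RequestProcessor() must run before
    // the stats map is torn down. Compile with: g++ -std=c++17 order.cpp
    #include <cstdio>
    #include <string>
    #include <unordered_map>

    using Stats = std::unordered_map<std::string, int>;

    struct RequestProcessor {
        Stats* stats;                              // non-owning view into the globals
        void ReportStats() { std::printf("reporting %zu sinks\n", stats->size()); }
        ~RequestProcessor() { ReportStats(); }     // runs during module shutdown
    };

    int main() {
        Stats stats{{"pdo_query", 3}};
        RequestProcessor rp{&stats};
        // Correct order (what this patch enforces): rp is destroyed first and can
        // still read `stats`; destroying the map before rp would be a use-after-free.
        return 0;
    }
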
--- lib/php-extension/Aikido.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 82729ed37..0aaac7f58 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -138,12 +138,12 @@ PHP_GINIT_FUNCTION(aikido) { PHP_GSHUTDOWN_FUNCTION(aikido) { aikido_globals->laravelEnv.~unordered_map(); - aikido_globals->stats.~unordered_map(); aikido_globals->phpLifecycle.~PhpLifecycle(); aikido_globals->eventCache.~EventCache(); aikido_globals->requestCache.~RequestCache(); aikido_globals->action.~Action(); aikido_globals->requestProcessor.~RequestProcessor(); + aikido_globals->stats.~unordered_map(); aikido_globals->server.~Server(); aikido_globals->logger.~Log(); aikido_globals->agent.~Agent(); From 9dc8f995d40e3cf0a914f7d7df9ea5b573d33755 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Mon, 10 Nov 2025 16:35:37 +0000 Subject: [PATCH 006/170] Reorder destruction of global variables in PHP_GSHUTDOWN_FUNCTION to ensure proper cleanup and prevent use-after-free issues. --- lib/php-extension/Aikido.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 0aaac7f58..dfc1ee4d6 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -139,10 +139,10 @@ PHP_GINIT_FUNCTION(aikido) { PHP_GSHUTDOWN_FUNCTION(aikido) { aikido_globals->laravelEnv.~unordered_map(); aikido_globals->phpLifecycle.~PhpLifecycle(); - aikido_globals->eventCache.~EventCache(); - aikido_globals->requestCache.~RequestCache(); aikido_globals->action.~Action(); aikido_globals->requestProcessor.~RequestProcessor(); + aikido_globals->eventCache.~EventCache(); + aikido_globals->requestCache.~RequestCache(); aikido_globals->stats.~unordered_map(); aikido_globals->server.~Server(); aikido_globals->logger.~Log(); From 2f5fffb5c49fdde83b624a7c2280eed0eff78bbc Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Mon, 10 Nov 2025 17:53:40 +0000 Subject: [PATCH 007/170] Update Server::GetBody() to properly release zend_string + fix destructor order --- lib/php-extension/Aikido.cpp | 4 ++-- lib/php-extension/Server.cpp | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index dfc1ee4d6..b9ac07cfc 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -141,12 +141,12 @@ PHP_GSHUTDOWN_FUNCTION(aikido) { aikido_globals->phpLifecycle.~PhpLifecycle(); aikido_globals->action.~Action(); aikido_globals->requestProcessor.~RequestProcessor(); - aikido_globals->eventCache.~EventCache(); - aikido_globals->requestCache.~RequestCache(); aikido_globals->stats.~unordered_map(); aikido_globals->server.~Server(); aikido_globals->logger.~Log(); aikido_globals->agent.~Agent(); + aikido_globals->eventCache.~EventCache(); + aikido_globals->requestCache.~RequestCache(); aikido_globals->config_endpoint.~string(); aikido_globals->endpoint.~string(); aikido_globals->token.~string(); diff --git a/lib/php-extension/Server.cpp b/lib/php-extension/Server.cpp index 6f8bc7a17..490a2494f 100644 --- a/lib/php-extension/Server.cpp +++ b/lib/php-extension/Server.cpp @@ -74,7 +74,9 @@ std::string Server::GetBody() { stream = php_stream_open_wrapper("php://input", "rb", 0 | REPORT_ERRORS, NULL); if ((contents = php_stream_copy_to_mem(stream, maxlen, 0)) != NULL) { php_stream_close(stream); - return std::string(ZSTR_VAL(contents)); + std::string result = 
std::string(ZSTR_VAL(contents), ZSTR_LEN(contents)); + zend_string_release(contents); + return result; } php_stream_close(stream); return ""; From 4b45bc029812ac1cbe8580983ccb7fa71dd0e880 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Mon, 10 Nov 2025 19:35:04 +0000 Subject: [PATCH 008/170] Add ZTS support for global variable cleanup in PHP_GSHUTDOWN_FUNCTION and enhance shell execution statistics --- lib/php-extension/Aikido.cpp | 2 ++ lib/php-extension/HandleShellExecution.cpp | 2 ++ lib/php-extension/include/php_aikido.h | 25 ++++++++++++---------- tools/build.sh | 10 +++++++++ tools/sample_apps_build.sh | 10 +++++++++ 5 files changed, 38 insertions(+), 11 deletions(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index b9ac07cfc..bb25fe602 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -137,6 +137,7 @@ PHP_GINIT_FUNCTION(aikido) { } PHP_GSHUTDOWN_FUNCTION(aikido) { +#ifdef ZTS aikido_globals->laravelEnv.~unordered_map(); aikido_globals->phpLifecycle.~PhpLifecycle(); aikido_globals->action.~Action(); @@ -152,6 +153,7 @@ PHP_GSHUTDOWN_FUNCTION(aikido) { aikido_globals->token.~string(); aikido_globals->sapi_name.~string(); aikido_globals->log_level_str.~string(); +#endif } zend_module_entry aikido_module_entry = { diff --git a/lib/php-extension/HandleShellExecution.cpp b/lib/php-extension/HandleShellExecution.cpp index e6023b6b3..d9abcf3e0 100644 --- a/lib/php-extension/HandleShellExecution.cpp +++ b/lib/php-extension/HandleShellExecution.cpp @@ -24,6 +24,8 @@ AIKIDO_HANDLER_FUNCTION(handle_shell_execution) { AIKIDO_HANDLER_FUNCTION(handle_shell_execution_with_array) { + scopedTimer.SetSink(sink, "exec_op"); + zval *cmdVal = nullptr; ZEND_PARSE_PARAMETERS_START(0, -1) diff --git a/lib/php-extension/include/php_aikido.h b/lib/php-extension/include/php_aikido.h index d99fe5d99..cda90f461 100644 --- a/lib/php-extension/include/php_aikido.h +++ b/lib/php-extension/include/php_aikido.h @@ -35,28 +35,31 @@ bool collect_api_schema; bool trust_proxy; bool localhost_allowed_by_default; unsigned int report_stats_interval_to_agent; // Report once every X requests the collected stats to Agent +std::chrono::high_resolution_clock::time_point currentRequestStart; +uint64_t totalOverheadForCurrentRequest; +bool laravelEnvLoaded; +bool checkedAutoBlock; +bool checkedShouldBlockRequest; +HashTable *global_ast_to_clean; +void (*original_ast_process)(zend_ast *ast); +// IMPORTANT: The order of these objects MUST NOT be changed due to object interdependencies. +// This ensures proper construction/destruction order in both ZTS and non-ZTS modes. +// Objects are constructed in this order and destroyed in reverse order. 
std::string log_level_str; std::string sapi_name; std::string token; std::string endpoint; std::string config_endpoint; -Log logger; +RequestCache requestCache; +EventCache eventCache; Agent agent; +Log logger; Server server; +std::unordered_map stats; RequestProcessor requestProcessor; Action action; -RequestCache requestCache; -EventCache eventCache; PhpLifecycle phpLifecycle; -std::unordered_map stats; -std::chrono::high_resolution_clock::time_point currentRequestStart; -uint64_t totalOverheadForCurrentRequest; std::unordered_map laravelEnv; -bool laravelEnvLoaded; -bool checkedAutoBlock; -bool checkedShouldBlockRequest; -HashTable *global_ast_to_clean; -void (*original_ast_process)(zend_ast *ast); ZEND_END_MODULE_GLOBALS(aikido) ZEND_EXTERN_MODULE_GLOBALS(aikido) diff --git a/tools/build.sh b/tools/build.sh index 75058183c..b5749ca6a 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -23,6 +23,16 @@ go test ./... go build -ldflags "-s -w" -buildmode=c-shared -o ../../build/aikido-request-processor.so cd ../../build CXX=g++ CXXFLAGS="-fPIC -g -O2 -I../lib/php-extension/include" LDFLAGS="-lstdc++" ../lib/php-extension/configure +sed -i "s/available_tags=''/available_tags='CXX'/" libtool +if ! grep -q "BEGIN LIBTOOL TAG CONFIG: CXX" libtool; then + sed -i '/^# ### BEGIN LIBTOOL TAG CONFIG: disable-shared$/i\ +# ### BEGIN LIBTOOL TAG CONFIG: CXX\ +LTCXX="g++"\ +CXXFLAGS="-fPIC -g -O2"\ +compiler_CXX="g++"\ +# ### END LIBTOOL TAG CONFIG: CXX\ +' libtool +fi make cd ./modules/ mv aikido.so $AIKIDO_EXTENSION diff --git a/tools/sample_apps_build.sh b/tools/sample_apps_build.sh index e65c0ebc0..d83c453ef 100644 --- a/tools/sample_apps_build.sh +++ b/tools/sample_apps_build.sh @@ -31,6 +31,16 @@ go mod tidy go build -ldflags "-s -w" -buildmode=c-shared -o ../../build/aikido-request-processor.so cd ../../build CXX=g++ CXXFLAGS="-fPIC -g -O2 -I../lib/php-extension/include" LDFLAGS="-lstdc++" ../lib/php-extension/configure +sed -i "s/available_tags=''/available_tags='CXX'/" libtool +if ! grep -q "BEGIN LIBTOOL TAG CONFIG: CXX" libtool; then + sed -i '/^# ### BEGIN LIBTOOL TAG CONFIG: disable-shared$/i\ +# ### BEGIN LIBTOOL TAG CONFIG: CXX\ +LTCXX="g++"\ +CXXFLAGS="-fPIC -g -O2"\ +compiler_CXX="g++"\ +# ### END LIBTOOL TAG CONFIG: CXX\ +' libtool +fi make cd ./modules/ mv aikido.so $AIKIDO_EXTENSION From b3f2f87b184ec31c5f2b4babc1d6ebd5ccc984b7 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 11 Nov 2025 12:07:06 +0000 Subject: [PATCH 009/170] Temporary disable franken installation support. 
We should provide the zts extension binaries in rpm --- package/rpm/aikido.spec | 126 ++++++++++++++++++++-------------------- 1 file changed, 64 insertions(+), 62 deletions(-) diff --git a/package/rpm/aikido.spec b/package/rpm/aikido.spec index bddf90245..afd7ab6b6 100644 --- a/package/rpm/aikido.spec +++ b/package/rpm/aikido.spec @@ -57,18 +57,18 @@ fi # Check if FrankenPHP is installed -FRANKENPHP_PHP_VERSION="" -if command -v frankenphp -v >/dev/null 2>&1; then - if frankenphp -v >/dev/null 2>&1; then - FRANKENPHP_PHP_VERSION=$(frankenphp -v 2>/dev/null | grep -oP 'PHP \K\d+\.\d+' | head -n 1) - fi - - if [ -n "$FRANKENPHP_PHP_VERSION" ]; then - echo "Found FrankenPHP with embedded PHP $FRANKENPHP_PHP_VERSION" - else - echo "Found FrankenPHP but could not determine PHP version" - fi -fi +# FRANKENPHP_PHP_VERSION="" +# if command -v frankenphp -v >/dev/null 2>&1; then +# if frankenphp -v >/dev/null 2>&1; then +# FRANKENPHP_PHP_VERSION=$(frankenphp -v 2>/dev/null | grep -oP 'PHP \K\d+\.\d+' | head -n 1) +# fi +# +# if [ -n "$FRANKENPHP_PHP_VERSION" ]; then +# echo "Found FrankenPHP with embedded PHP $FRANKENPHP_PHP_VERSION" +# else +# echo "Found FrankenPHP but could not determine PHP version" +# fi +# fi for PHP_VERSION in "${PHP_VERSIONS[@]}"; do echo "Installing for PHP $PHP_VERSION..." @@ -131,35 +131,37 @@ for PHP_VERSION in "${PHP_VERSIONS[@]}"; do done # Install for FrankenPHP if installed -if [ -n "$FRANKENPHP_PHP_VERSION" ]; then - echo "Installing for FrankenPHP with PHP $FRANKENPHP_PHP_VERSION..." - - FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" - FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" - - # Install Aikido PHP extension for FrankenPHP - if [ -d "$FRANKENPHP_EXT_DIR" ]; then - echo "Installing new Aikido extension in $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." - ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so - else - echo "FrankenPHP extension directory $FRANKENPHP_EXT_DIR not found! Creating it..." - mkdir -p $FRANKENPHP_EXT_DIR - ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so - fi - - # Install Aikido ini file for FrankenPHP - if [ -d "$FRANKENPHP_INI_DIR" ]; then - echo "Installing new Aikido mod in $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." - ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini - else - echo "FrankenPHP ini directory $FRANKENPHP_INI_DIR not found! Creating it..." - mkdir -p $FRANKENPHP_INI_DIR - ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini - fi -fi - -if [ ${#PHP_VERSIONS[@]} -eq 0 ] && [ -z "$FRANKENPHP_PHP_VERSION" ]; then - echo "No PHP or FrankenPHP found! Exiting!" +# if [ -n "$FRANKENPHP_PHP_VERSION" ]; then +# echo "Installing for FrankenPHP with PHP $FRANKENPHP_PHP_VERSION..." +# +# FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" +# FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" +# +# # Install Aikido PHP extension for FrankenPHP +# if [ -d "$FRANKENPHP_EXT_DIR" ]; then +# echo "Installing new Aikido extension in $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." +# ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so +# else +# echo "FrankenPHP extension directory $FRANKENPHP_EXT_DIR not found! Creating it..." 
+# mkdir -p $FRANKENPHP_EXT_DIR +# ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so +# fi +# +# # Install Aikido ini file for FrankenPHP +# if [ -d "$FRANKENPHP_INI_DIR" ]; then +# echo "Installing new Aikido mod in $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." +# ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini +# else +# echo "FrankenPHP ini directory $FRANKENPHP_INI_DIR not found! Creating it..." +# mkdir -p $FRANKENPHP_INI_DIR +# ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini +# fi +# fi + +if [ ${#PHP_VERSIONS[@]} -eq 0 ]; then +# if [ ${#PHP_VERSIONS[@]} -eq 0 ] && [ -z "$FRANKENPHP_PHP_VERSION" ]; then + echo "No PHP found! Exiting!" +# echo "No PHP or FrankenPHP found! Exiting!" exit 1 fi @@ -200,12 +202,12 @@ done echo "Found PHP versions: ${PHP_VERSIONS[*]}" # Check if FrankenPHP directories exist for uninstall -FRANKENPHP_INSTALLED=false -FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" -FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" -if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then - FRANKENPHP_INSTALLED=true -fi +# FRANKENPHP_INSTALLED=false +# FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" +# FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" +# if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then +# FRANKENPHP_INSTALLED=true +# fi for PHP_VERSION in "${PHP_VERSIONS[@]}"; do echo "Uninstalling for PHP $PHP_VERSION..." @@ -262,21 +264,21 @@ for PHP_VERSION in "${PHP_VERSIONS[@]}"; do done # Uninstall for FrankenPHP if directories exist -FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" -FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" -if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then - echo "Uninstalling for FrankenPHP..." - - if [ -f "$FRANKENPHP_EXT_DIR/aikido-%{version}.so" ]; then - echo "Uninstalling Aikido extension from $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." - rm -f $FRANKENPHP_EXT_DIR/aikido-%{version}.so - fi - - if [ -f "$FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini" ]; then - echo "Uninstalling Aikido mod from $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." - rm -f $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini - fi -fi +# FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" +# FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" +# if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then +# echo "Uninstalling for FrankenPHP..." +# +# if [ -f "$FRANKENPHP_EXT_DIR/aikido-%{version}.so" ]; then +# echo "Uninstalling Aikido extension from $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." +# rm -f $FRANKENPHP_EXT_DIR/aikido-%{version}.so +# fi +# +# if [ -f "$FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini" ]; then +# echo "Uninstalling Aikido mod from $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." 
+# rm -f $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini +# fi +# fi # Remove the Aikido logs folder rm -rf /var/log/aikido-%{version} From 3ffed085bf1f1be72ab401d4c94c7a398bf03f20 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 11 Nov 2025 14:16:45 +0000 Subject: [PATCH 010/170] GINIT with proper ifdef --- lib/php-extension/Aikido.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 2bbcf865e..7000efd19 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -114,6 +114,7 @@ PHP_GINIT_FUNCTION(aikido) { aikido_globals->checkedShouldBlockRequest = false; aikido_globals->global_ast_to_clean = nullptr; aikido_globals->original_ast_process = nullptr; +#ifdef ZTS new (&aikido_globals->log_level_str) std::string(); new (&aikido_globals->sapi_name) std::string(); new (&aikido_globals->token) std::string(); @@ -129,6 +130,7 @@ PHP_GINIT_FUNCTION(aikido) { new (&aikido_globals->phpLifecycle) PhpLifecycle(); new (&aikido_globals->stats) std::unordered_map(); new (&aikido_globals->laravelEnv) std::unordered_map(); +#endif } PHP_GSHUTDOWN_FUNCTION(aikido) { From 0a2cd8d4ed22f8c26aeb3787cacaf2f593f07575 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 18 Nov 2025 14:40:41 +0000 Subject: [PATCH 011/170] Enhance thread safety and instance management in RequestProcessor - Updated RequestProcessor to manage instances with thread safety, supporting both NTS and ZTS modes. - Introduced CreateInstance and DestroyInstance functions for instance management. - Refactored existing function pointers to accept instance pointers, ensuring proper context handling. - Improved logging and error handling during initialization and configuration updates. - Added mutex locks to prevent race conditions in concurrent environments. - Adjusted various function signatures to accommodate instance pointers for better encapsulation of state. 
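
For readers skimming the diff below, a rough standalone sketch of the handle-passing pattern this change introduces. CreateInstance/DestroyInstance and the instance-first-argument convention come from this patch; the library path, the OnEvent symbol name, and the simplified signatures in this stub are assumptions for illustration only.

    // Hypothetical loader sketch: dlopen the request-processor library and thread
    // an opaque per-instance handle through every call instead of sharing globals.
    #include <dlfcn.h>
    #include <cstdio>

    using CreateInstanceFn  = void* (*)();
    using DestroyInstanceFn = void  (*)(void* instance);
    using OnEventFn         = char* (*)(void* instance, int eventId);

    int main() {
        void* lib = dlopen("./aikido-request-processor.so", RTLD_LAZY);
        if (!lib) { std::fprintf(stderr, "dlopen: %s\n", dlerror()); return 1; }

        auto createInstance  = reinterpret_cast<CreateInstanceFn>(dlsym(lib, "CreateInstance"));
        auto destroyInstance = reinterpret_cast<DestroyInstanceFn>(dlsym(lib, "DestroyInstance"));
        auto onEvent         = reinterpret_cast<OnEventFn>(dlsym(lib, "RequestProcessorOnEvent"));
        if (!createInstance || !destroyInstance || !onEvent) {
            std::fprintf(stderr, "missing symbols\n");
            return 1;
        }

        // The caller keeps the returned handle and passes it to every later call;
        // per the commit message, the Go side guards shared state with mutexes so
        // concurrent ZTS threads stay safe.
        void* instance = createInstance();
        if (char* reply = onEvent(instance, /*eventId=*/0)) std::printf("%s\n", reply);
        destroyInstance(instance);

        dlclose(lib);
        return 0;
    }
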
--- lib/php-extension/Log.cpp | 2 +- lib/php-extension/PhpLifecycle.cpp | 5 + lib/php-extension/RequestProcessor.cpp | 68 ++++++--- lib/php-extension/Server.cpp | 2 +- lib/php-extension/Utils.cpp | 8 +- lib/php-extension/include/Includes.h | 1 + lib/php-extension/include/RequestProcessor.h | 24 ++- lib/php-extension/include/Utils.h | 2 +- lib/request-processor/aikido_types/handle.go | 2 +- lib/request-processor/attack/attack.go | 7 +- lib/request-processor/config/config.go | 28 ++-- lib/request-processor/context/cache.go | 44 ++++-- .../context/context_for_unit_tests.go | 12 ++ .../context/request_context.go | 60 +++++++- lib/request-processor/globals/globals.go | 6 - lib/request-processor/grpc/config.go | 20 ++- .../handle_blocking_request.go | 12 +- .../handle_path_traversal.go | 3 +- .../handle_rate_limit_group_event.go | 3 +- .../handle_request_metadata.go | 21 ++- .../handle_shell_execution.go | 3 +- lib/request-processor/handle_sql_queries.go | 3 +- lib/request-processor/handle_urls.go | 8 +- lib/request-processor/handle_user_event.go | 6 +- lib/request-processor/instance/manager.go | 98 ++++++++++++ lib/request-processor/instance/wrapper.go | 127 +++++++++++++++ lib/request-processor/log/log.go | 48 +++++- lib/request-processor/main.go | 144 ++++++++++++++---- .../vulnerabilities/ssrf/isRequestToItself.go | 7 +- .../ssrf/isRequestToItself_test.go | 8 +- .../zen-internals/zen_internals.go | 46 ++++-- tools/build.sh | 2 + tools/rpm_full_build.sh | 3 +- 33 files changed, 682 insertions(+), 151 deletions(-) create mode 100644 lib/request-processor/instance/manager.go create mode 100644 lib/request-processor/instance/wrapper.go diff --git a/lib/php-extension/Log.cpp b/lib/php-extension/Log.cpp index f60ac8098..4838cd8f6 100644 --- a/lib/php-extension/Log.cpp +++ b/lib/php-extension/Log.cpp @@ -37,7 +37,7 @@ void Log::Write(AIKIDO_LOG_LEVEL level, const char* format, ...) { return; } - fprintf(logFile, "[AIKIDO][%s][%d][%jd][%s] ", ToString(level).c_str(), getpid(), (intmax_t)GetThreadID(), GetTime().c_str()); + fprintf(logFile, "[AIKIDO][%s][%d][%lu][%s] ", ToString(level).c_str(), getpid(), GetThreadID(), GetTime().c_str()); va_list args; va_start(args, format); diff --git a/lib/php-extension/PhpLifecycle.cpp b/lib/php-extension/PhpLifecycle.cpp index ca814f3cf..adef19499 100644 --- a/lib/php-extension/PhpLifecycle.cpp +++ b/lib/php-extension/PhpLifecycle.cpp @@ -23,6 +23,10 @@ void PhpLifecycle::RequestShutdown() { } void PhpLifecycle::ModuleShutdown() { +#ifdef ZTS + AIKIDO_LOG_INFO("ZTS mode: Uninitializing Aikido Request Processor to stop background goroutines...\n"); + AIKIDO_GLOBAL(requestProcessor).Uninit(); +#else if (this->mainPID == getpid()) { AIKIDO_LOG_INFO("Module shutdown called on main PID.\n"); AIKIDO_LOG_INFO("Unhooking functions...\n"); @@ -33,6 +37,7 @@ void PhpLifecycle::ModuleShutdown() { AIKIDO_LOG_INFO("Module shutdown NOT called on main PID. 
Uninitializing Aikido Request Processor...\n"); AIKIDO_GLOBAL(requestProcessor).Uninit(); } +#endif } void PhpLifecycle::HookAll() { diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp index d509b02df..ad6886537 100644 --- a/lib/php-extension/RequestProcessor.cpp +++ b/lib/php-extension/RequestProcessor.cpp @@ -27,20 +27,20 @@ std::string RequestProcessor::GetInitData(const std::string& token) { } bool RequestProcessor::ContextInit() { - if (!this->requestInitialized || this->requestProcessorContextInitFn == nullptr) { + if (!this->requestInitialized || this->requestProcessorContextInitFn == nullptr || this->requestProcessorInstance == nullptr) { return false; } - return this->requestProcessorContextInitFn(GoContextCallback); + return this->requestProcessorContextInitFn(this->requestProcessorInstance, GoContextCallback); } bool RequestProcessor::SendEvent(EVENT_ID eventId, std::string& output) { - if (!this->requestInitialized || this->requestProcessorOnEventFn == nullptr) { + if (!this->requestInitialized || this->requestProcessorOnEventFn == nullptr || this->requestProcessorInstance == nullptr) { return false; } AIKIDO_LOG_DEBUG("Sending event %s...\n", GetEventName(eventId)); - char* charPtr = this->requestProcessorOnEventFn(eventId); + char* charPtr = this->requestProcessorOnEventFn(this->requestProcessorInstance, eventId); if (!charPtr) { AIKIDO_LOG_DEBUG("Got event reply: nullptr\n"); return true; @@ -78,10 +78,10 @@ void RequestProcessor::SendPostRequestEvent() { Otherwise, return the env variable AIKIDO_BLOCK. */ bool RequestProcessor::IsBlockingEnabled() { - if (!this->requestInitialized || this->requestProcessorGetBlockingModeFn == nullptr) { + if (!this->requestInitialized || this->requestProcessorGetBlockingModeFn == nullptr || this->requestProcessorInstance == nullptr) { return false; } - int ret = this->requestProcessorGetBlockingModeFn(); + int ret = this->requestProcessorGetBlockingModeFn(this->requestProcessorInstance); if (ret == -1) { ret = AIKIDO_GLOBAL(blocking); } @@ -103,6 +103,7 @@ bool RequestProcessor::ReportStats() { const SinkStats& sinkStats = it->second; AIKIDO_LOG_INFO("Reporting stats for sink \"%s\" to Aikido Request Processor...\n", sink.c_str()); requestProcessorReportStatsFn( + this->requestProcessorInstance, GoCreateString(sink), GoCreateString(sinkStats.kind), sinkStats.attacksDetected, @@ -141,13 +142,17 @@ bool RequestProcessor::Init() { std::string requestProcessorLibPath = "/opt/aikido-" + std::string(PHP_AIKIDO_VERSION) + "/aikido-request-processor.so"; this->libHandle = dlopen(requestProcessorLibPath.c_str(), RTLD_LAZY); if (!this->libHandle) { - AIKIDO_LOG_ERROR("Error loading the Aikido Request Processor library from %s: %s!\n", requestProcessorLibPath.c_str(), dlerror()); + const char* err = dlerror(); + AIKIDO_LOG_ERROR("Error loading the Aikido Request Processor library from %s: %s!\n", requestProcessorLibPath.c_str(), err); this->initFailed = true; return false; } AIKIDO_LOG_INFO("Initializing Aikido Request Processor...\n"); + this->createInstanceFn = (CreateInstanceFn)dlsym(libHandle, "CreateInstance"); + this->destroyInstanceFn = (DestroyInstanceFn)dlsym(libHandle, "DestroyInstance"); + RequestProcessorInitFn requestProcessorInitFn = (RequestProcessorInitFn)dlsym(libHandle, "RequestProcessorInit"); this->requestProcessorContextInitFn = (RequestProcessorContextInitFn)dlsym(libHandle, "RequestProcessorContextInit"); this->requestProcessorConfigUpdateFn = 
(RequestProcessorConfigUpdateFn)dlsym(libHandle, "RequestProcessorConfigUpdate"); @@ -155,7 +160,9 @@ bool RequestProcessor::Init() { this->requestProcessorGetBlockingModeFn = (RequestProcessorGetBlockingModeFn)dlsym(libHandle, "RequestProcessorGetBlockingMode"); this->requestProcessorReportStatsFn = (RequestProcessorReportStats)dlsym(libHandle, "RequestProcessorReportStats"); this->requestProcessorUninitFn = (RequestProcessorUninitFn)dlsym(libHandle, "RequestProcessorUninit"); - if (!requestProcessorInitFn || + if (!this->createInstanceFn || + !this->destroyInstanceFn || + !requestProcessorInitFn || !this->requestProcessorContextInitFn || !this->requestProcessorConfigUpdateFn || !this->requestProcessorOnEventFn || @@ -167,15 +174,11 @@ bool RequestProcessor::Init() { return false; } - if (!requestProcessorInitFn(GoCreateString(initDataString))) { - AIKIDO_LOG_ERROR("Failed to initialize Aikido Request Processor library: %s!\n", dlerror()); - this->initFailed = true; - return false; - } + this->requestProcessorInitFn = requestProcessorInitFn; AIKIDO_GLOBAL(logger).Init(); - AIKIDO_LOG_INFO("Aikido Request Processor initialized successfully (SAPI: %s)!\n", AIKIDO_GLOBAL(sapi_name).c_str()); + AIKIDO_LOG_INFO("Aikido Request Processor library loaded successfully (SAPI: %s)!\n", AIKIDO_GLOBAL(sapi_name).c_str()); return true; } @@ -184,7 +187,35 @@ bool RequestProcessor::RequestInit() { AIKIDO_LOG_ERROR("Failed to initialize the request processor: %s!\n", dlerror()); return false; } + if (this->requestProcessorInstance == nullptr && this->createInstanceFn != nullptr) { + uint64_t threadId = GetThreadID(); + #ifdef ZTS + bool isZTS = true; + #else + bool isZTS = false; + #endif + this->requestProcessorInstance = this->createInstanceFn(threadId, isZTS); + if (this->requestProcessorInstance == nullptr) { + AIKIDO_LOG_ERROR("Failed to create Go RequestProcessorInstance!\n"); + return false; + } + AIKIDO_LOG_INFO("Created Go RequestProcessorInstance (threadId: %lu, isZTS: %d)\n", threadId, isZTS); + + if (this->requestProcessorInitFn == nullptr) { + AIKIDO_LOG_ERROR("RequestProcessorInitFn is not loaded!\n"); + return false; + } + + + std::string initDataString = this->GetInitData(); + if (!this->requestProcessorInitFn(this->requestProcessorInstance, GoCreateString(initDataString))) { + AIKIDO_LOG_ERROR("Failed to initialize Aikido Request Processor!\n"); + return false; + } + AIKIDO_LOG_INFO("RequestProcessorInit called successfully\n"); + } + const auto& sapiName = AIKIDO_GLOBAL(sapi_name); if (sapiName == "apache2handler") { // Apache-mod-php can serve multiple sites per process @@ -220,7 +251,7 @@ bool RequestProcessor::RequestInit() { } void RequestProcessor::LoadConfig(const std::string& previousToken, const std::string& currentToken) { - if (this->requestProcessorConfigUpdateFn == nullptr) { + if (this->requestProcessorConfigUpdateFn == nullptr || this->requestProcessorInstance == nullptr) { return; } if (currentToken.empty()) { @@ -234,7 +265,7 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s AIKIDO_LOG_INFO("Reloading Aikido config...\n"); std::string initJson = this->GetInitData(currentToken); - this->requestProcessorConfigUpdateFn(GoCreateString(initJson)); + this->requestProcessorConfigUpdateFn(this->requestProcessorInstance, GoCreateString(initJson)); } void RequestProcessor::LoadConfigFromEnvironment() { @@ -258,15 +289,16 @@ void RequestProcessor::Uninit() { if (!this->libHandle) { return; } - if (!this->initFailed && 
this->requestProcessorUninitFn) { + if (!this->initFailed && this->requestProcessorUninitFn && this->requestProcessorInstance != nullptr) { AIKIDO_LOG_INFO("Reporting final stats to Aikido Request Processor...\n"); this->ReportStats(); AIKIDO_LOG_INFO("Calling uninit for Aikido Request Processor...\n"); - this->requestProcessorUninitFn(); + this->requestProcessorUninitFn(this->requestProcessorInstance); } dlclose(this->libHandle); this->libHandle = nullptr; + this->requestProcessorInstance = nullptr; AIKIDO_LOG_INFO("Aikido Request Processor unloaded!\n"); } diff --git a/lib/php-extension/Server.cpp b/lib/php-extension/Server.cpp index 94d096d16..f40a67ebf 100644 --- a/lib/php-extension/Server.cpp +++ b/lib/php-extension/Server.cpp @@ -21,7 +21,7 @@ zval* Server::GetServerVar() { } /* Get the "_SERVER" PHP global variable */ - return &PG(http_globals)[TRACK_VARS_SERVER]; + return &PG(http_globals)[TRACK_VARS_SERVER]; } std::string Server::GetVar(const char* var) { diff --git a/lib/php-extension/Utils.cpp b/lib/php-extension/Utils.cpp index 0216e001b..b25041dd2 100644 --- a/lib/php-extension/Utils.cpp +++ b/lib/php-extension/Utils.cpp @@ -33,12 +33,8 @@ std::string GetDateTime() { return time_str; } -pid_t GetThreadID() { -#ifdef SYS_gettid - return syscall(SYS_gettid); -#else - return (pid_t)getpid(); // Fallback for non-Linux systems -#endif +uint64_t GetThreadID() { + return (uint64_t)pthread_self(); } const char* GetEventName(EVENT_ID event) { switch (event) { diff --git a/lib/php-extension/include/Includes.h b/lib/php-extension/include/Includes.h index 337ca6bb8..05690af5d 100644 --- a/lib/php-extension/include/Includes.h +++ b/lib/php-extension/include/Includes.h @@ -14,6 +14,7 @@ #include #include #include +#include #include #include diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h index f18addcd8..2fe0adfa6 100644 --- a/lib/php-extension/include/RequestProcessor.h +++ b/lib/php-extension/include/RequestProcessor.h @@ -1,19 +1,29 @@ #pragma once -typedef GoUint8 (*RequestProcessorInitFn)(GoString initJson); -typedef GoUint8 (*RequestProcessorContextInitFn)(ContextCallback); -typedef GoUint8 (*RequestProcessorConfigUpdateFn)(GoString initJson); -typedef char* (*RequestProcessorOnEventFn)(GoInt eventId); -typedef int (*RequestProcessorGetBlockingModeFn)(); -typedef void (*RequestProcessorReportStats)(GoString, GoString, GoInt32, GoInt32, GoInt32, GoInt32, GoInt32, GoSlice); -typedef void (*RequestProcessorUninitFn)(); +typedef void* (*CreateInstanceFn)(uint64_t threadId, bool isZTS); +typedef void (*DestroyInstanceFn)(uint64_t threadId); + +// Updated typedefs with instance pointer as first parameter +typedef GoUint8 (*RequestProcessorInitFn)(void* instancePtr, GoString initJson); +typedef GoUint8 (*RequestProcessorContextInitFn)(void* instancePtr, ContextCallback); +typedef GoUint8 (*RequestProcessorConfigUpdateFn)(void* instancePtr, GoString initJson); +typedef char* (*RequestProcessorOnEventFn)(void* instancePtr, GoInt eventId); +typedef int (*RequestProcessorGetBlockingModeFn)(void* instancePtr); +typedef void (*RequestProcessorReportStats)(void* instancePtr, GoString, GoString, GoInt32, GoInt32, GoInt32, GoInt32, GoInt32, GoSlice); +typedef void (*RequestProcessorUninitFn)(void* instancePtr); class RequestProcessor { private: bool initFailed = false; bool requestInitialized = false; void* libHandle = nullptr; + void* requestProcessorInstance = nullptr; uint64_t numberOfRequests = 0; + + // Function pointers to Go-exported 
functions + CreateInstanceFn createInstanceFn = nullptr; + DestroyInstanceFn destroyInstanceFn = nullptr; + RequestProcessorInitFn requestProcessorInitFn = nullptr; RequestProcessorContextInitFn requestProcessorContextInitFn = nullptr; RequestProcessorConfigUpdateFn requestProcessorConfigUpdateFn = nullptr; RequestProcessorOnEventFn requestProcessorOnEventFn = nullptr; diff --git a/lib/php-extension/include/Utils.h b/lib/php-extension/include/Utils.h index 24b53a4ef..56ae41463 100644 --- a/lib/php-extension/include/Utils.h +++ b/lib/php-extension/include/Utils.h @@ -12,7 +12,7 @@ std::string GetTime(); std::string GetDateTime(); -pid_t GetThreadID(); +uint64_t GetThreadID(); // Returns 0 for NTS, pthread_self() for ZTS const char* GetEventName(EVENT_ID event); diff --git a/lib/request-processor/aikido_types/handle.go b/lib/request-processor/aikido_types/handle.go index c5f6500f2..7eb8fac19 100644 --- a/lib/request-processor/aikido_types/handle.go +++ b/lib/request-processor/aikido_types/handle.go @@ -2,7 +2,7 @@ package aikido_types import "main/ipc/protos" -type HandlerFunction func() string +type HandlerFunction func(interface{}) string type Method struct { ClassName string diff --git a/lib/request-processor/attack/attack.go b/lib/request-processor/attack/attack.go index a647c994a..3be894dae 100644 --- a/lib/request-processor/attack/attack.go +++ b/lib/request-processor/attack/attack.go @@ -5,7 +5,6 @@ import ( "fmt" "html" "main/context" - "main/globals" "main/grpc" "main/ipc/protos" "main/utils" @@ -35,8 +34,8 @@ func GetHeadersProto() []*protos.Header { /* Construct the AttackDetected protobuf structure to be sent via gRPC to the Agent */ func GetAttackDetectedProto(res utils.InterceptorResult) *protos.AttackDetected { return &protos.AttackDetected{ - Token: globals.CurrentToken, - ServerPid: globals.EnvironmentConfig.ServerPID, + Token: context.GetCurrentServerToken(), + ServerPid: context.GetServerPID(), Request: &protos.Request{ Method: context.GetMethod(), IpAddress: context.GetIp(), @@ -51,7 +50,7 @@ func GetAttackDetectedProto(res utils.InterceptorResult) *protos.AttackDetected Kind: string(res.Kind), Operation: res.Operation, Module: context.GetModule(), - Blocked: utils.IsBlockingEnabled(globals.GetCurrentServer()), + Blocked: utils.IsBlockingEnabled(context.GetCurrentServer()), Source: res.Source, Path: res.PathToPayload, Stack: context.GetStackTrace(), diff --git a/lib/request-processor/config/config.go b/lib/request-processor/config/config.go index 7c327dacc..db52cc882 100644 --- a/lib/request-processor/config/config.go +++ b/lib/request-processor/config/config.go @@ -5,18 +5,28 @@ import ( "fmt" . 
"main/aikido_types" "main/globals" + "main/instance" "main/log" "main/utils" "os" ) -func UpdateToken(token string) bool { - if token == globals.CurrentToken { +func UpdateToken(inst *instance.RequestProcessorInstance, token string) bool { + server := globals.GetServer(token) + if server == nil { + log.Debugf("Server not found for token \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(token)) + return false + } + + if token == inst.GetCurrentToken() { + if inst.GetCurrentServer() == nil { + inst.SetCurrentServer(server) + } log.Debugf("Token is the same as previous one, skipping config reload...") return false } - globals.CurrentToken = token - globals.CurrentServer = globals.GetServer(token) + inst.SetCurrentToken(token) + inst.SetCurrentServer(server) log.Infof("Token changed to \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(token)) return true } @@ -30,7 +40,7 @@ const ( ReloadWithPastSeenToken ) -func ReloadAikidoConfig(conf *AikidoConfigData, initJson string) ReloadResult { +func ReloadAikidoConfig(inst *instance.RequestProcessorInstance, conf *AikidoConfigData, initJson string) ReloadResult { err := json.Unmarshal([]byte(initJson), conf) if err != nil { return ReloadError @@ -45,18 +55,18 @@ func ReloadAikidoConfig(conf *AikidoConfigData, initJson string) ReloadResult { } if globals.ServerExists(conf.Token) { - if !UpdateToken(conf.Token) { + if !UpdateToken(inst, conf.Token) { return ReloadWithSameToken } return ReloadWithPastSeenToken } server := globals.CreateServer(conf.Token) server.AikidoConfig = *conf - UpdateToken(conf.Token) + UpdateToken(inst, conf.Token) return ReloadWithNewToken } -func Init(initJson string) { +func Init(inst *instance.RequestProcessorInstance, initJson string) { err := json.Unmarshal([]byte(initJson), &globals.EnvironmentConfig) if err != nil { panic(fmt.Sprintf("Error parsing JSON to EnvironmentConfig: %s", err)) @@ -66,7 +76,7 @@ func Init(initJson string) { globals.EnvironmentConfig.RequestProcessorPID = int32(os.Getpid()) conf := AikidoConfigData{} - ReloadAikidoConfig(&conf, initJson) + ReloadAikidoConfig(inst, &conf, initJson) log.Init(conf.DiskLogs) } diff --git a/lib/request-processor/context/cache.go b/lib/request-processor/context/cache.go index 5eb5e4d2f..6ff6014c7 100644 --- a/lib/request-processor/context/cache.go +++ b/lib/request-processor/context/cache.go @@ -3,7 +3,6 @@ package context // #include "../../API.h" import "C" import ( - "main/globals" "main/helpers" "main/log" "main/utils" @@ -25,6 +24,10 @@ func ContextSetMap(contextId int, rawDataPtr **string, parsedPtr **map[string]in return } + if Context.Callback == nil { + return + } + contextData := Context.Callback(contextId) if rawDataPtr != nil { *rawDataPtr = &contextData @@ -43,6 +46,11 @@ func ContextSetString(context_id int, m **string) { if *m != nil { return } + + if Context.Callback == nil { + return + } + temp := Context.Callback(context_id) *m = &temp } @@ -71,6 +79,9 @@ func ContextSetStatusCode() { if Context.StatusCode != nil { return } + if Context.Callback == nil { + return + } status_code_str := Context.Callback(C.CONTEXT_STATUS_CODE) status_code, err := strconv.Atoi(status_code_str) if err != nil { @@ -105,9 +116,12 @@ func ContextSetIp() { if Context.IP != nil { return } + if Context.Callback == nil { + return + } remoteAddress := Context.Callback(C.CONTEXT_REMOTE_ADDRESS) xForwardedFor := Context.Callback(C.CONTEXT_HEADER_X_FORWARDED_FOR) - ip := utils.GetIpFromRequest(globals.GetCurrentServer(), remoteAddress, xForwardedFor) + ip := 
utils.GetIpFromRequest(GetCurrentServer(), remoteAddress, xForwardedFor) Context.IP = &ip } @@ -116,7 +130,7 @@ func ContextSetIsIpBypassed() { return } - isIpBypassed := utils.IsIpBypassed(globals.GetCurrentServer(), GetIp()) + isIpBypassed := utils.IsIpBypassed(GetCurrentServer(), GetIp()) Context.IsIpBypassed = &isIpBypassed } @@ -132,6 +146,9 @@ func ContextSetRateLimitGroup() { if Context.RateLimitGroup != nil { return } + if Context.Callback == nil { + return + } rateLimitGroup := Context.Callback(C.CONTEXT_RATE_LIMIT_GROUP) Context.RateLimitGroup = &rateLimitGroup } @@ -141,7 +158,11 @@ func ContextSetEndpointConfig() { return } - endpointConfig := utils.GetEndpointConfig(globals.GetCurrentServer(), GetMethod(), GetParsedRoute()) + if GetCurrentServer() == nil { + return + } + + endpointConfig := utils.GetEndpointConfig(GetCurrentServer(), GetMethod(), GetParsedRoute()) Context.EndpointConfig = &endpointConfig } @@ -150,7 +171,11 @@ func ContextSetWildcardEndpointsConfigs() { return } - wildcardEndpointsConfigs := utils.GetWildcardEndpointsConfigs(globals.GetCurrentServer(), GetMethod(), GetParsedRoute()) + if GetCurrentServer() == nil { + return + } + + wildcardEndpointsConfigs := utils.GetWildcardEndpointsConfigs(GetCurrentServer(), GetMethod(), GetParsedRoute()) Context.WildcardEndpointsConfigs = &wildcardEndpointsConfigs } @@ -226,14 +251,15 @@ func ContextSetIsEndpointIpAllowed() { isEndpointIpAllowed := utils.NoConfig + server := GetCurrentServer() endpointConfig := GetEndpointConfig() - if endpointConfig != nil { - isEndpointIpAllowed = utils.IsIpAllowedOnEndpoint(globals.GetCurrentServer(), endpointConfig.AllowedIPAddresses, ip) + if endpointConfig != nil && server != nil { + isEndpointIpAllowed = utils.IsIpAllowedOnEndpoint(server, endpointConfig.AllowedIPAddresses, ip) } - if isEndpointIpAllowed == utils.NoConfig { + if isEndpointIpAllowed == utils.NoConfig && server != nil { for _, wildcardEndpointConfig := range GetWildcardEndpointsConfig() { - isEndpointIpAllowed = utils.IsIpAllowedOnEndpoint(globals.GetCurrentServer(), wildcardEndpointConfig.AllowedIPAddresses, ip) + isEndpointIpAllowed = utils.IsIpAllowedOnEndpoint(server, wildcardEndpointConfig.AllowedIPAddresses, ip) if isEndpointIpAllowed != utils.NoConfig { break } diff --git a/lib/request-processor/context/context_for_unit_tests.go b/lib/request-processor/context/context_for_unit_tests.go index 51f8af7f0..a8761476b 100644 --- a/lib/request-processor/context/context_for_unit_tests.go +++ b/lib/request-processor/context/context_for_unit_tests.go @@ -5,9 +5,11 @@ import "C" import ( "encoding/json" "fmt" + . 
"main/aikido_types" ) var TestContext map[string]string +var TestServer *ServerData // Test server for unit tests func UnitTestsCallback(context_id int) string { switch context_id { @@ -47,6 +49,16 @@ func LoadForUnitTests(context map[string]string) { func UnloadForUnitTests() { Context = RequestContextData{} EventContext = EventContextData{} + TestServer = nil +} + +func SetTestServer(server *ServerData) { + TestServer = server +} + +// GetTestServer returns the current test server, or nil if not set +func GetTestServer() *ServerData { + return TestServer } func GetJsonString(m map[string]interface{}) string { diff --git a/lib/request-processor/context/request_context.go b/lib/request-processor/context/request_context.go index 893783b32..81b237421 100644 --- a/lib/request-processor/context/request_context.go +++ b/lib/request-processor/context/request_context.go @@ -4,7 +4,9 @@ package context import "C" import ( . "main/aikido_types" + "main/globals" "main/log" + "unsafe" ) type CallbackFunction func(int) string @@ -45,8 +47,60 @@ type RequestContextData struct { } var Context RequestContextData +var contextInstance unsafe.Pointer -func Init(callback CallbackFunction) bool { +type requestProcessorInstance struct { + CurrentToken string + CurrentServer *ServerData +} + +func GetCurrentServer() (result *ServerData) { + defer func() { + if r := recover(); r != nil { + result = nil + } + }() + + if contextInstance == nil { + if TestServer != nil { + return GetTestServer() + } + return nil + } + + instPtr := (*requestProcessorInstance)(contextInstance) + if instPtr == nil { + if TestServer != nil { + return GetTestServer() + } + return nil + } + result = instPtr.CurrentServer + return result +} + +func GetCurrentServerToken() string { + if contextInstance == nil { + return "" + } + + instPtr := (*requestProcessorInstance)(contextInstance) + if instPtr == nil { + return "" + } + return instPtr.CurrentToken +} + +func GetServerPID() int32 { + return globals.EnvironmentConfig.ServerPID +} + +func GetInstancePtr() unsafe.Pointer { + return contextInstance +} + +func Init(instPtr unsafe.Pointer, callback CallbackFunction) bool { + contextInstance = instPtr Context = RequestContextData{ Callback: callback, } @@ -67,7 +121,9 @@ func GetFromCache[T any](fetchDataFn func(), s **T) T { } if *s == nil { var t T - log.Warnf("Error getting from cache. Returning default value %v...", t) + if GetCurrentServer() != nil { + log.Warnf("Error getting from cache. 
Returning default value %v...", t) + } return t } return **s diff --git a/lib/request-processor/globals/globals.go b/lib/request-processor/globals/globals.go index 327c11178..0e3157be7 100644 --- a/lib/request-processor/globals/globals.go +++ b/lib/request-processor/globals/globals.go @@ -8,8 +8,6 @@ import ( var EnvironmentConfig EnvironmentConfigData var Servers = make(map[string]*ServerData) var ServersMutex sync.RWMutex -var CurrentToken string = "" -var CurrentServer *ServerData = nil func NewServerData() *ServerData { return &ServerData{ @@ -22,10 +20,6 @@ func NewServerData() *ServerData { } } -func GetCurrentServer() *ServerData { - return CurrentServer -} - func GetServer(token string) *ServerData { if token == "" { return nil diff --git a/lib/request-processor/grpc/config.go b/lib/request-processor/grpc/config.go index d341e7115..3af692fe2 100644 --- a/lib/request-processor/grpc/config.go +++ b/lib/request-processor/grpc/config.go @@ -8,12 +8,15 @@ import ( "regexp" "runtime" "strings" + "sync" "time" ) var ( - stopChan chan struct{} - cloudConfigTicker = time.NewTicker(1 * time.Minute) + stopChan chan struct{} + cloudConfigTicker = time.NewTicker(1 * time.Minute) + cloudConfigStarted bool + cloudConfigMutex sync.Mutex ) func buildIpList(cloudIpList map[string]*protos.IpList) map[string]IpList { @@ -135,6 +138,14 @@ func setCloudConfig(server *ServerData, cloudConfigFromAgent *protos.CloudConfig } func StartCloudConfigRoutine() { + cloudConfigMutex.Lock() + defer cloudConfigMutex.Unlock() + + if cloudConfigStarted { + return + } + cloudConfigStarted = true + stopChan = make(chan struct{}) go func() { @@ -151,7 +162,12 @@ func StartCloudConfigRoutine() { } func stopCloudConfigRoutine() { + cloudConfigMutex.Lock() + defer cloudConfigMutex.Unlock() + if stopChan != nil { close(stopChan) + stopChan = nil } + cloudConfigStarted = false } diff --git a/lib/request-processor/handle_blocking_request.go b/lib/request-processor/handle_blocking_request.go index 2b4fbe783..f0b3524c6 100644 --- a/lib/request-processor/handle_blocking_request.go +++ b/lib/request-processor/handle_blocking_request.go @@ -5,8 +5,8 @@ import ( "fmt" "html" "main/context" - "main/globals" "main/grpc" + "main/instance" "main/log" "main/utils" "time" @@ -29,10 +29,10 @@ func GetAction(actionHandling, actionType, trigger, description, data string, re return string(actionJson) } -func OnGetBlockingStatus() string { +func OnGetBlockingStatus(inst *instance.RequestProcessorInstance) string { log.Debugf("OnGetBlockingStatus called!") - server := globals.GetCurrentServer() + server := inst.GetCurrentServer() if server == nil { return "" } @@ -47,7 +47,7 @@ func OnGetBlockingStatus() string { return GetAction("store", "blocked", "user", "user blocked from config", userId, 403) } - autoBlockingStatus := OnGetAutoBlockingStatus() + autoBlockingStatus := OnGetAutoBlockingStatus(inst) if context.IsIpBypassed() { return "" @@ -75,10 +75,10 @@ func OnGetBlockingStatus() string { return autoBlockingStatus } -func OnGetAutoBlockingStatus() string { +func OnGetAutoBlockingStatus(inst *instance.RequestProcessorInstance) string { log.Debugf("OnGetAutoBlockingStatus called!") - server := globals.GetCurrentServer() + server := inst.GetCurrentServer() if server == nil { return "" } diff --git a/lib/request-processor/handle_path_traversal.go b/lib/request-processor/handle_path_traversal.go index 5267066b9..d56728f4f 100644 --- a/lib/request-processor/handle_path_traversal.go +++ b/lib/request-processor/handle_path_traversal.go @@ -3,11 
+3,12 @@ package main import ( "main/attack" "main/context" + "main/instance" "main/log" path_traversal "main/vulnerabilities/path-traversal" ) -func OnPrePathAccessed() string { +func OnPrePathAccessed(inst *instance.RequestProcessorInstance) string { filename := context.GetFilename() filename2 := context.GetFilename2() operation := context.GetFunctionName() diff --git a/lib/request-processor/handle_rate_limit_group_event.go b/lib/request-processor/handle_rate_limit_group_event.go index 0fa7e7e96..2527b9abe 100644 --- a/lib/request-processor/handle_rate_limit_group_event.go +++ b/lib/request-processor/handle_rate_limit_group_event.go @@ -2,10 +2,11 @@ package main import ( "main/context" + "main/instance" "main/log" ) -func OnRateLimitGroupEvent() string { +func OnRateLimitGroupEvent(inst *instance.RequestProcessorInstance) string { context.ContextSetRateLimitGroup() group := context.GetRateLimitGroup() log.Infof("Got rate limit group: %s", group) diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index d08f2dd2c..625c509c4 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -4,14 +4,14 @@ import ( . "main/aikido_types" "main/api_discovery" "main/context" - "main/globals" "main/grpc" + "main/instance" "main/log" "main/utils" webscanner "main/vulnerabilities/web-scanner" ) -func OnPreRequest() string { +func OnPreRequest(inst *instance.RequestProcessorInstance) string { context.Clear() return "" } @@ -32,12 +32,13 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { grpc.OnRequestShutdown(params) } -func OnPostRequest() string { - server := globals.GetCurrentServer() +func OnPostRequest(inst *instance.RequestProcessorInstance) string { + server := inst.GetCurrentServer() if server == nil { return "" } - go OnRequestShutdownReporting(RequestShutdownParams{ + + params := RequestShutdownParams{ Server: server, Method: context.GetMethod(), Route: context.GetRoute(), @@ -47,10 +48,16 @@ func OnPostRequest() string { UserAgent: context.GetUserAgent(), IP: context.GetIp(), RateLimitGroup: context.GetRateLimitGroup(), - APISpec: api_discovery.GetApiInfo(server), RateLimited: context.IsEndpointRateLimited(), QueryParsed: context.GetQueryParsed(), - }) + APISpec: api_discovery.GetApiInfo(server), // Also needs context, must be called before Clear() + } + context.Clear() + + go func() { + OnRequestShutdownReporting(params) + }() + return "" } diff --git a/lib/request-processor/handle_shell_execution.go b/lib/request-processor/handle_shell_execution.go index f120c555a..7e2286f3f 100644 --- a/lib/request-processor/handle_shell_execution.go +++ b/lib/request-processor/handle_shell_execution.go @@ -3,11 +3,12 @@ package main import ( "main/attack" "main/context" + "main/instance" "main/log" shell_injection "main/vulnerabilities/shell-injection" ) -func OnPreShellExecuted() string { +func OnPreShellExecuted(inst *instance.RequestProcessorInstance) string { cmd := context.GetCmd() operation := context.GetFunctionName() if cmd == "" { diff --git a/lib/request-processor/handle_sql_queries.go b/lib/request-processor/handle_sql_queries.go index a92f3bddf..babb3c70d 100644 --- a/lib/request-processor/handle_sql_queries.go +++ b/lib/request-processor/handle_sql_queries.go @@ -3,11 +3,12 @@ package main import ( "main/attack" "main/context" + "main/instance" "main/log" sql_injection "main/vulnerabilities/sql-injection" ) -func OnPreSqlQueryExecuted() string { +func 
OnPreSqlQueryExecuted(inst *instance.RequestProcessorInstance) string { query := context.GetSqlQuery() dialect := context.GetSqlDialect() operation := context.GetFunctionName() diff --git a/lib/request-processor/handle_urls.go b/lib/request-processor/handle_urls.go index e818aaa46..f0acdd3a7 100644 --- a/lib/request-processor/handle_urls.go +++ b/lib/request-processor/handle_urls.go @@ -3,8 +3,8 @@ package main import ( "main/attack" "main/context" - "main/globals" "main/grpc" + "main/instance" "main/log" ssrf "main/vulnerabilities/ssrf" ) @@ -19,7 +19,7 @@ import ( All these checks first verify if the hostname was provided via user input. Protects both curl and fopen wrapper functions (file_get_contents, etc...). */ -func OnPreOutgoingRequest() string { +func OnPreOutgoingRequest(inst *instance.RequestProcessorInstance) string { if context.IsEndpointProtectionTurnedOff() { log.Infof("Protection is turned off -> will not run detection logic!") return "" @@ -54,7 +54,7 @@ func OnPreOutgoingRequest() string { All these checks first verify if the hostname was provided via user input. Protects curl. */ -func OnPostOutgoingRequest() string { +func OnPostOutgoingRequest(inst *instance.RequestProcessorInstance) string { defer context.ResetEventContext() hostname, port := context.GetOutgoingRequestHostnameAndPort() @@ -66,7 +66,7 @@ func OnPostOutgoingRequest() string { log.Info("[AFTER] Got domain: ", hostname, " port: ", port) - server := globals.GetCurrentServer() + server := inst.GetCurrentServer() if server != nil { go grpc.OnDomain(server, hostname, port) if effectiveHostname != hostname { diff --git a/lib/request-processor/handle_user_event.go b/lib/request-processor/handle_user_event.go index f7a5e66c2..55326e098 100644 --- a/lib/request-processor/handle_user_event.go +++ b/lib/request-processor/handle_user_event.go @@ -2,12 +2,12 @@ package main import ( "main/context" - "main/globals" "main/grpc" + "main/instance" "main/log" ) -func OnUserEvent() string { +func OnUserEvent(inst *instance.RequestProcessorInstance) string { id := context.GetUserId() username := context.GetUserName() ip := context.GetIp() @@ -18,7 +18,7 @@ func OnUserEvent() string { return "" } - server := globals.GetCurrentServer() + server := inst.GetCurrentServer() if server == nil { return "" } diff --git a/lib/request-processor/instance/manager.go b/lib/request-processor/instance/manager.go new file mode 100644 index 000000000..af3078eb9 --- /dev/null +++ b/lib/request-processor/instance/manager.go @@ -0,0 +1,98 @@ +package instance + +import ( + "runtime" + "sync" + "unsafe" +) + +// Global instance storage - unified for both NTS and ZTS +// Key: Thread ID +// +// - NTS: Always use threadID = 0 (single process-wide instance) +// - ZTS: Use pthread_self() (unique per thread) +var ( + instances = make(map[uint64]*RequestProcessorInstance) + instancesMutex sync.RWMutex + pinners = make(map[uint64]runtime.Pinner) // Keeps instances pinned for CGO +) + +// CreateInstance creates and stores a new instance +// +// threadID: +// +// - For NTS: pass 0 (creates/reuses single instance) +// - For ZTS: pass pthread_self() (creates per-thread instance) +// +// isZTS: +// +// - true if running in Franken PHP (ZTS mode) +// - false if running in standard PHP (NTS mode) +// +// Returns: unsafe.Pointer to the instance (for C++ to store) +func CreateInstance(threadID uint64, isZTS bool) unsafe.Pointer { + instancesMutex.Lock() + defer instancesMutex.Unlock() + + // Check if instance already exists for this thread/process + if 
existingInstance, exists := instances[threadID]; exists { + return unsafe.Pointer(existingInstance) + } + + // Create new instance + instance := NewRequestProcessorInstance(isZTS) + instances[threadID] = instance + + // Pin the instance to prevent garbage collection while C++ holds pointer + var pinner runtime.Pinner + pinner.Pin(instance) + pinners[threadID] = pinner + + return unsafe.Pointer(instance) +} + +// GetInstance retrieves an instance by its pointer +func GetInstance(instancePtr unsafe.Pointer) *RequestProcessorInstance { + if instancePtr == nil { + return nil + } + return (*RequestProcessorInstance)(instancePtr) +} + +// DestroyInstance removes an instance from storage +// +// threadID: +// +// - For NTS: pass 0 +// - For ZTS: pass pthread_self() +func DestroyInstance(threadID uint64) { + instancesMutex.Lock() + defer instancesMutex.Unlock() + + // Unpin the instance to allow garbage collection + if pinner, exists := pinners[threadID]; exists { + pinner.Unpin() + delete(pinners, threadID) + } + + delete(instances, threadID) +} + +// GetAllInstances returns all active instances (for testing/debugging) +func GetAllInstances() []*RequestProcessorInstance { + instancesMutex.RLock() + defer instancesMutex.RUnlock() + + result := make([]*RequestProcessorInstance, 0, len(instances)) + for _, instance := range instances { + result = append(result, instance) + } + return result +} + +// GetInstanceCount returns the number of active instances +func GetInstanceCount() int { + instancesMutex.RLock() + defer instancesMutex.RUnlock() + return len(instances) +} diff --git a/lib/request-processor/instance/wrapper.go b/lib/request-processor/instance/wrapper.go new file mode 100644 index 000000000..0728555d6 --- /dev/null +++ b/lib/request-processor/instance/wrapper.go @@ -0,0 +1,127 @@ +package instance + +import ( + . 
"main/aikido_types" + "main/context" + "sync" + "unsafe" +) + +// RequestProcessorInstance encapsulates all thread-local/request-scoped globals +type RequestProcessorInstance struct { + // Per-request state (changes with each request/token update) + CurrentToken string + CurrentServer *ServerData + RequestContext context.RequestContextData + ContextInstance unsafe.Pointer // Stores instance pointer for context callbacks + ContextCallback unsafe.Pointer // Callback function pointer (C.ContextCallback) - must be instance-local for ZTS + + // Lock for thread safety (only used/locked in ZTS) + mu sync.Mutex + isZTS bool // Set once at creation time - determines if locking is needed +} + +// NewRequestProcessorInstance creates a new instance +// isZTS: true for Franken PHP (ZTS), false for standard PHP (NTS) +func NewRequestProcessorInstance(isZTS bool) *RequestProcessorInstance { + return &RequestProcessorInstance{ + CurrentToken: "", + CurrentServer: nil, + RequestContext: context.RequestContextData{}, + isZTS: isZTS, + } +} + +// SetCurrentServer updates the current server for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) SetCurrentServer(server *ServerData) { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + i.CurrentServer = server +} + +// GetCurrentServer returns the current server for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) GetCurrentServer() *ServerData { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + return i.CurrentServer +} + +// SetCurrentToken updates the current token for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) SetCurrentToken(token string) { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + i.CurrentToken = token +} + +// GetCurrentToken returns the current token for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) GetCurrentToken() string { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + return i.CurrentToken +} + +// SetRequestContext updates the request context for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) SetRequestContext(ctx context.RequestContextData) { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + i.RequestContext = ctx +} + +// GetRequestContext returns the request context for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) GetRequestContext() *context.RequestContextData { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + return &i.RequestContext +} + +// IsInitialized checks if this instance has been initialized +func (i *RequestProcessorInstance) IsInitialized() bool { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + return i.CurrentServer != nil +} + +// IsZTS returns whether this instance is running in ZTS mode +func (i *RequestProcessorInstance) IsZTS() bool { + return i.isZTS +} + +// SetContextCallback stores the context callback for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) SetContextCallback(callback unsafe.Pointer) { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + i.ContextCallback = callback +} + +// GetContextCallback returns the context callback for this instance +// Conditional locking: only locks if ZTS mode +func (i *RequestProcessorInstance) GetContextCallback() unsafe.Pointer { + if i.isZTS { + i.mu.Lock() + defer 
i.mu.Unlock() + } + return i.ContextCallback +} diff --git a/lib/request-processor/log/log.go b/lib/request-processor/log/log.go index 1161ee991..019bd06b0 100644 --- a/lib/request-processor/log/log.go +++ b/lib/request-processor/log/log.go @@ -6,6 +6,7 @@ import ( "log" "main/globals" "os" + "sync" "time" ) @@ -23,6 +24,7 @@ var ( Logger = log.New(os.Stdout, "", 0) cliLogging = true logFilePath = "" + logMutex sync.RWMutex ) var LogFile *os.File @@ -46,13 +48,21 @@ func (f *AikidoFormatter) Format(level LogLevel, message string) string { if len(message) > 1024 { message = message[:1024] + "... [truncated]" } - if cliLogging { + + logMutex.RLock() + isCliLogging := cliLogging + logMutex.RUnlock() + + if isCliLogging { return fmt.Sprintf("[AIKIDO][%s] %s\n", levelStr, message) } return fmt.Sprintf("[AIKIDO][%s][%s] %s\n", levelStr, time.Now().Format("15:04:05"), message) } func initLogFile() { + logMutex.Lock() + defer logMutex.Unlock() + if cliLogging { return } @@ -68,7 +78,11 @@ func initLogFile() { } func logMessage(level LogLevel, args ...interface{}) { - if level >= currentLogLevel { + logMutex.RLock() + lvl := currentLogLevel + logMutex.RUnlock() + + if level >= lvl { initLogFile() formatter := &AikidoFormatter{} message := fmt.Sprint(args...) @@ -78,7 +92,11 @@ func logMessage(level LogLevel, args ...interface{}) { } func logMessagef(level LogLevel, format string, args ...interface{}) { - if level >= currentLogLevel { + logMutex.RLock() + lvl := currentLogLevel + logMutex.RUnlock() + + if level >= lvl { initLogFile() formatter := &AikidoFormatter{} message := fmt.Sprintf(format, args...) @@ -117,27 +135,37 @@ func Warnf(format string, args ...interface{}) { func Errorf(format string, args ...interface{}) { logMessagef(ErrorLevel, format, args...) 
- } +// SetLogLevel changes the current log level (thread-safe) func SetLogLevel(level string) error { + var newLevel LogLevel + switch level { case "DEBUG": - currentLogLevel = DebugLevel + newLevel = DebugLevel case "INFO": - currentLogLevel = InfoLevel + newLevel = InfoLevel case "WARN": - currentLogLevel = WarnLevel + newLevel = WarnLevel case "ERROR": - currentLogLevel = ErrorLevel + newLevel = ErrorLevel default: return errors.New("invalid log level") } + + logMutex.Lock() + defer logMutex.Unlock() + currentLogLevel = newLevel return nil } func Init(diskLogs bool) { + logMutex.Lock() + defer logMutex.Unlock() + if !diskLogs { + cliLogging = true return } cliLogging = false @@ -147,7 +175,11 @@ func Init(diskLogs bool) { } func Uninit() { + logMutex.Lock() + defer logMutex.Unlock() + if LogFile != nil { LogFile.Close() + LogFile = nil } } diff --git a/lib/request-processor/main.go b/lib/request-processor/main.go index 57f2278f2..47e3bb40b 100644 --- a/lib/request-processor/main.go +++ b/lib/request-processor/main.go @@ -8,6 +8,7 @@ import ( "main/context" "main/globals" "main/grpc" + "main/instance" "main/log" "main/utils" zen_internals "main/vulnerabilities/zen-internals" @@ -17,17 +18,39 @@ import ( ) var eventHandlers = map[int]HandlerFunction{ - C.EVENT_PRE_REQUEST: OnPreRequest, - C.EVENT_POST_REQUEST: OnPostRequest, - C.EVENT_SET_USER: OnUserEvent, - C.EVENT_SET_RATE_LIMIT_GROUP: OnRateLimitGroupEvent, - C.EVENT_GET_AUTO_BLOCKING_STATUS: OnGetAutoBlockingStatus, - C.EVENT_GET_BLOCKING_STATUS: OnGetBlockingStatus, - C.EVENT_PRE_OUTGOING_REQUEST: OnPreOutgoingRequest, - C.EVENT_POST_OUTGOING_REQUEST: OnPostOutgoingRequest, - C.EVENT_PRE_SHELL_EXECUTED: OnPreShellExecuted, - C.EVENT_PRE_PATH_ACCESSED: OnPrePathAccessed, - C.EVENT_PRE_SQL_QUERY_EXECUTED: OnPreSqlQueryExecuted, + C.EVENT_PRE_REQUEST: func(i interface{}) string { + return OnPreRequest(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_POST_REQUEST: func(i interface{}) string { + return OnPostRequest(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_SET_USER: func(i interface{}) string { + return OnUserEvent(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_SET_RATE_LIMIT_GROUP: func(i interface{}) string { + return OnRateLimitGroupEvent(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_GET_AUTO_BLOCKING_STATUS: func(i interface{}) string { + return OnGetAutoBlockingStatus(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_GET_BLOCKING_STATUS: func(i interface{}) string { + return OnGetBlockingStatus(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_PRE_OUTGOING_REQUEST: func(i interface{}) string { + return OnPreOutgoingRequest(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_POST_OUTGOING_REQUEST: func(i interface{}) string { + return OnPostOutgoingRequest(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_PRE_SHELL_EXECUTED: func(i interface{}) string { + return OnPreShellExecuted(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_PRE_PATH_ACCESSED: func(i interface{}) string { + return OnPrePathAccessed(i.(*instance.RequestProcessorInstance)) + }, + C.EVENT_PRE_SQL_QUERY_EXECUTED: func(i interface{}) string { + return OnPreSqlQueryExecuted(i.(*instance.RequestProcessorInstance)) + }, } func initializeServer(server *ServerData) { @@ -36,8 +59,18 @@ func initializeServer(server *ServerData) { grpc.GetCloudConfig(server, 5*time.Second) } +//export CreateInstance +func CreateInstance(threadID uint64, isZTS bool) unsafe.Pointer { + return instance.CreateInstance(threadID, isZTS) 
+} + +//export DestroyInstance +func DestroyInstance(threadID uint64) { + instance.DestroyInstance(threadID) +} + //export RequestProcessorInit -func RequestProcessorInit(initJson string) (initOk bool) { +func RequestProcessorInit(instancePtr unsafe.Pointer, initJson string) (initOk bool) { defer func() { if r := recover(); r != nil { log.Warn("Recovered from panic:", r) @@ -45,7 +78,12 @@ func RequestProcessorInit(initJson string) (initOk bool) { } }() - config.Init(initJson) + inst := instance.GetInstance(instancePtr) + if inst == nil { + return false + } + + config.Init(inst, initJson) log.Debugf("Aikido Request Processor v%s (server PID: %d, request processor PID: %d) started in \"%s\" mode!", globals.Version, @@ -54,11 +92,11 @@ func RequestProcessorInit(initJson string) (initOk bool) { globals.EnvironmentConfig.PlatformName, ) log.Debugf("Init data: %s", initJson) - log.Debugf("Started with token: \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(globals.CurrentToken)) + log.Debugf("Started with token: \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(inst.GetCurrentToken())) if globals.EnvironmentConfig.PlatformName != "cli" { grpc.Init() - server := globals.GetCurrentServer() + server := inst.GetCurrentServer() if server != nil { initializeServer(server) } @@ -71,14 +109,22 @@ func RequestProcessorInit(initJson string) (initOk bool) { return true } -var CContextCallback C.ContextCallback - func GoContextCallback(contextId int) string { - if CContextCallback == nil { + // Get the instance from the context package + // This works because context.Init stores the instance pointer + instPtr := context.GetInstancePtr() + inst := instance.GetInstance(instPtr) + if inst == nil { return "" } - contextData := C.call(CContextCallback, C.int(contextId)) + contextCallbackPtr := inst.GetContextCallback() + if contextCallbackPtr == nil { + return "" + } + + contextCallback := (C.ContextCallback)(contextCallbackPtr) + contextData := C.call(contextCallback, C.int(contextId)) if contextData == nil { return "" } @@ -96,7 +142,7 @@ func GoContextCallback(contextId int) string { } //export RequestProcessorContextInit -func RequestProcessorContextInit(contextCallback C.ContextCallback) (initOk bool) { +func RequestProcessorContextInit(instancePtr unsafe.Pointer, contextCallback C.ContextCallback) (initOk bool) { defer func() { if r := recover(); r != nil { log.Warn("Recovered from panic:", r) @@ -104,16 +150,20 @@ func RequestProcessorContextInit(contextCallback C.ContextCallback) (initOk bool } }() - log.Debug("Initializing context...") - CContextCallback = contextCallback - return context.Init(GoContextCallback) + inst := instance.GetInstance(instancePtr) + if inst == nil { + return false + } + + inst.SetContextCallback(unsafe.Pointer(contextCallback)) + return context.Init(instancePtr, GoContextCallback) } /* RequestProcessorConfigUpdate is used to update the Aikido Config loaded from env variables and send this config via gRPC to the Aikido Agent. 
*/ //export RequestProcessorConfigUpdate -func RequestProcessorConfigUpdate(configJson string) (initOk bool) { +func RequestProcessorConfigUpdate(instancePtr unsafe.Pointer, configJson string) (initOk bool) { defer func() { if r := recover(); r != nil { log.Warn("Recovered from panic:", r) @@ -121,11 +171,19 @@ func RequestProcessorConfigUpdate(configJson string) (initOk bool) { } }() + inst := instance.GetInstance(instancePtr) + + if inst == nil { + return false + } + log.Debugf("Reloading Aikido config...") conf := AikidoConfigData{} - reloadResult := config.ReloadAikidoConfig(&conf, configJson) - server := globals.GetCurrentServer() + reloadResult := config.ReloadAikidoConfig(inst, &conf, configJson) + + server := inst.GetCurrentServer() + if server == nil { return false } @@ -145,7 +203,7 @@ func RequestProcessorConfigUpdate(configJson string) (initOk bool) { } //export RequestProcessorOnEvent -func RequestProcessorOnEvent(eventId int) (outputJson *C.char) { +func RequestProcessorOnEvent(instancePtr unsafe.Pointer, eventId int) (outputJson *C.char) { defer func() { if r := recover(); r != nil { log.Warn("Recovered from panic:", r) @@ -153,7 +211,17 @@ func RequestProcessorOnEvent(eventId int) (outputJson *C.char) { } }() - goString := eventHandlers[eventId]() + inst := instance.GetInstance(instancePtr) + if inst == nil { + return nil + } + + handler, exists := eventHandlers[eventId] + if !exists { + return nil + } + + goString := handler(inst) if goString == "" { return nil } @@ -166,22 +234,32 @@ func RequestProcessorOnEvent(eventId int) (outputJson *C.char) { Otherwise, it returns the environment value. */ //export RequestProcessorGetBlockingMode -func RequestProcessorGetBlockingMode() int { - return utils.GetBlockingMode(globals.GetCurrentServer()) +func RequestProcessorGetBlockingMode(instancePtr unsafe.Pointer) int { + inst := instance.GetInstance(instancePtr) + if inst == nil { + return -1 + } + return utils.GetBlockingMode(inst.GetCurrentServer()) } //export RequestProcessorReportStats -func RequestProcessorReportStats(sink, kind string, attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total int32, timings []int64) { +func RequestProcessorReportStats(instancePtr unsafe.Pointer, sink, kind string, attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total int32, timings []int64) { if globals.EnvironmentConfig.PlatformName == "cli" { return } + + inst := instance.GetInstance(instancePtr) + if inst == nil { + return + } + clonedTimings := make([]int64, len(timings)) copy(clonedTimings, timings) - go grpc.OnMonitoredSinkStats(globals.GetCurrentServer(), strings.Clone(sink), strings.Clone(kind), attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total, clonedTimings) + go grpc.OnMonitoredSinkStats(inst.GetCurrentServer(), strings.Clone(sink), strings.Clone(kind), attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total, clonedTimings) } //export RequestProcessorUninit -func RequestProcessorUninit() { +func RequestProcessorUninit(instancePtr unsafe.Pointer) { log.Debug("Uninit: {}") zen_internals.Uninit() diff --git a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go index f0ba26dd5..7007ac56e 100644 --- a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go +++ b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go @@ -2,7 +2,6 @@ package ssrf import ( "main/context" - "main/globals" "main/helpers" 
"net/url" ) @@ -13,9 +12,9 @@ import ( // This prevents false positives when a server makes requests to itself via different protocols. func IsRequestToItself(outboundHostname string, outboundPort uint32) bool { // Check if trust proxy is enabled - // If not enabled, we don't consider requests to itself as safe - server := globals.GetCurrentServer() - if server == nil || !server.AikidoConfig.TrustProxy { + // If not enabled, we don't consider requests to iteself as safe + server := context.GetCurrentServer() + if server != nil && !server.AikidoConfig.TrustProxy { return false } diff --git a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go index 91aa06b26..b98bd8237 100644 --- a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go +++ b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go @@ -3,21 +3,19 @@ package ssrf import ( "main/aikido_types" "main/context" - "main/globals" "testing" ) func setupTestContext(serverURL string, trustProxy bool) func() { // Setup a mock server with trust proxy setting - server := &aikido_types.ServerData{ + testServer := &aikido_types.ServerData{ AikidoConfig: aikido_types.AikidoConfigData{ TrustProxy: trustProxy, }, } // Store original server and restore it later - originalServer := globals.GetCurrentServer() - globals.CurrentServer = server + context.SetTestServer(testServer) // Use the proper test context loader context.LoadForUnitTests(map[string]string{ @@ -27,7 +25,7 @@ func setupTestContext(serverURL string, trustProxy bool) func() { // Return cleanup function return func() { context.UnloadForUnitTests() - globals.CurrentServer = originalServer + context.SetTestServer(nil) } } diff --git a/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go b/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go index e305b11eb..9a3eb1eff 100644 --- a/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go +++ b/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go @@ -25,15 +25,27 @@ import ( "main/globals" "main/log" "main/utils" + "sync" "unsafe" ) -var ( +type ZenInternalsLibrary struct { handle unsafe.Pointer detectSqlInjection C.detect_sql_injection_func -) + mu sync.RWMutex + initialized bool +} + +var zenLib = &ZenInternalsLibrary{} func Init() bool { + zenLib.mu.Lock() + defer zenLib.mu.Unlock() + + if zenLib.initialized { + return true + } + zenInternalsLibPath := C.CString(fmt.Sprintf("/opt/aikido-%s/libzen_internals_%s-unknown-linux-gnu.so", globals.Version, utils.GetArch())) defer C.free(unsafe.Pointer(zenInternalsLibPath)) @@ -49,26 +61,42 @@ func Init() bool { vDetectSqlInjection := C.dlsym(handle, detectSqlInjectionFnName) if vDetectSqlInjection == nil { log.Error("Failed to load detect_sql_injection function from zen-internals library!") + C.dlclose(handle) return false } - detectSqlInjection = (C.detect_sql_injection_func)(vDetectSqlInjection) + zenLib.handle = handle + zenLib.detectSqlInjection = (C.detect_sql_injection_func)(vDetectSqlInjection) + zenLib.initialized = true log.Debugf("Loaded zen-internals library!") return true } func Uninit() { - detectSqlInjection = nil + zenLib.mu.Lock() + defer zenLib.mu.Unlock() - if handle != nil { - C.dlclose(handle) - handle = nil + if !zenLib.initialized { + return + } + + zenLib.detectSqlInjection = nil + + if zenLib.handle != nil { + C.dlclose(zenLib.handle) + zenLib.handle = nil } + + zenLib.initialized = false } // 
DetectSQLInjection performs SQL injection detection using the loaded library func DetectSQLInjection(query string, user_input string, dialect int) int { - if detectSqlInjection == nil { + zenLib.mu.RLock() + detectFn := zenLib.detectSqlInjection + zenLib.mu.RUnlock() + + if detectFn == nil { return 0 } @@ -81,7 +109,7 @@ func DetectSQLInjection(query string, user_input string, dialect int) int { queryLen := C.size_t(len(query)) userInputLen := C.size_t(len(user_input)) - result := int(C.call_detect_sql_injection(detectSqlInjection, + result := int(C.call_detect_sql_injection(detectFn, cQuery, queryLen, cUserInput, userInputLen, C.int(dialect))) diff --git a/tools/build.sh b/tools/build.sh index fd4de241b..395ac9345 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -1,3 +1,5 @@ +set -e + export PATH="$PATH:$HOME/go/bin:$HOME/.local/bin" PHP_VERSION=$(php -v | grep -oP 'PHP \K\d+\.\d+' | head -n 1) diff --git a/tools/rpm_full_build.sh b/tools/rpm_full_build.sh index 2b38d6bca..b7dbfab7f 100755 --- a/tools/rpm_full_build.sh +++ b/tools/rpm_full_build.sh @@ -1,2 +1,3 @@ -rpm -e aikido-php-firewall +rpm -e aikido-php-firewall || true +set -e ./tools/build.sh && ./tools/rpm_build.sh && ./tools/rpm_install.sh From 40cd5eea15cac15a64a4ef6df690581d553d1f2c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 18 Nov 2025 14:49:24 +0000 Subject: [PATCH 012/170] Comments refactoring --- lib/request-processor/instance/manager.go | 40 +++++------------------ lib/request-processor/instance/wrapper.go | 35 +++++--------------- 2 files changed, 16 insertions(+), 59 deletions(-) diff --git a/lib/request-processor/instance/manager.go b/lib/request-processor/instance/manager.go index af3078eb9..af85e40f1 100644 --- a/lib/request-processor/instance/manager.go +++ b/lib/request-processor/instance/manager.go @@ -6,44 +6,29 @@ import ( "unsafe" ) -// Global instance storage - unified for both NTS and ZTS -// Key: Thread ID -// -// - NTS: Always use threadID = 0 (single process-wide instance) -// - ZTS: Use pthread_self() (unique per thread) +// Stores instances keyed by thread ID: +// - NTS (standard PHP): threadID is always 0, single instance +// - ZTS (FrankenPHP): threadID is pthread_self(), one per thread var ( instances = make(map[uint64]*RequestProcessorInstance) instancesMutex sync.RWMutex - pinners = make(map[uint64]runtime.Pinner) // Keeps instances pinned for CGO + pinners = make(map[uint64]runtime.Pinner) // Prevents GC while C++ holds pointers ) -// CreateInstance creates and stores a new instance -// -// threadID: -// -// - For NTS: pass 0 (creates/reuses single instance) -// - For ZTS: pass pthread_self() (creates per-thread instance) -// -// isZTS: -// -// - true if running in Franken PHP (ZTS mode) -// - false if running in standard PHP (NTS mode) -// -// Returns: unsafe.Pointer to the instance (for C++ to store) +// CreateInstance creates or reuses an instance for the given thread. +// Returns an unsafe.Pointer for C++ to store. 
func CreateInstance(threadID uint64, isZTS bool) unsafe.Pointer { instancesMutex.Lock() defer instancesMutex.Unlock() - // Check if instance already exists for this thread/process if existingInstance, exists := instances[threadID]; exists { return unsafe.Pointer(existingInstance) } - // Create new instance instance := NewRequestProcessorInstance(isZTS) instances[threadID] = instance - // Pin the instance to prevent garbage collection while C++ holds pointer + // Pin to prevent GC while C++ holds the pointer var pinner runtime.Pinner pinner.Pin(instance) pinners[threadID] = pinner @@ -51,7 +36,6 @@ func CreateInstance(threadID uint64, isZTS bool) unsafe.Pointer { return unsafe.Pointer(instance) } -// GetInstance retrieves an instance by its pointer func GetInstance(instancePtr unsafe.Pointer) *RequestProcessorInstance { if instancePtr == nil { return nil @@ -59,17 +43,10 @@ func GetInstance(instancePtr unsafe.Pointer) *RequestProcessorInstance { return (*RequestProcessorInstance)(instancePtr) } -// DestroyInstance removes an instance from storage -// -// threadID: -// -// - For NTS: pass 0 -// - For ZTS: pass pthread_self() func DestroyInstance(threadID uint64) { instancesMutex.Lock() defer instancesMutex.Unlock() - // Unpin the instance to allow garbage collection if pinner, exists := pinners[threadID]; exists { pinner.Unpin() delete(pinners, threadID) @@ -78,7 +55,7 @@ func DestroyInstance(threadID uint64) { delete(instances, threadID) } -// GetAllInstances returns all active instances (for testing/debugging) +// GetAllInstances is used for testing and debugging func GetAllInstances() []*RequestProcessorInstance { instancesMutex.RLock() defer instancesMutex.RUnlock() @@ -90,7 +67,6 @@ func GetAllInstances() []*RequestProcessorInstance { return result } -// GetInstanceCount returns the number of active instances func GetInstanceCount() int { instancesMutex.RLock() defer instancesMutex.RUnlock() diff --git a/lib/request-processor/instance/wrapper.go b/lib/request-processor/instance/wrapper.go index 0728555d6..1e4198102 100644 --- a/lib/request-processor/instance/wrapper.go +++ b/lib/request-processor/instance/wrapper.go @@ -7,22 +7,21 @@ import ( "unsafe" ) -// RequestProcessorInstance encapsulates all thread-local/request-scoped globals +// RequestProcessorInstance holds per-request state for each PHP thread. +// In NTS mode (standard PHP), there's one global instance. +// In ZTS mode (FrankenPHP), each thread gets its own instance with locking. type RequestProcessorInstance struct { - // Per-request state (changes with each request/token update) CurrentToken string CurrentServer *ServerData RequestContext context.RequestContextData - ContextInstance unsafe.Pointer // Stores instance pointer for context callbacks - ContextCallback unsafe.Pointer // Callback function pointer (C.ContextCallback) - must be instance-local for ZTS + ContextInstance unsafe.Pointer // For context callbacks + ContextCallback unsafe.Pointer // C function pointer, must be per-instance in ZTS - // Lock for thread safety (only used/locked in ZTS) - mu sync.Mutex - isZTS bool // Set once at creation time - determines if locking is needed + mu sync.Mutex // Only used when isZTS is true + isZTS bool } -// NewRequestProcessorInstance creates a new instance -// isZTS: true for Franken PHP (ZTS), false for standard PHP (NTS) +// NewRequestProcessorInstance creates an instance. Pass isZTS=true for FrankenPHP. 
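// Illustrative sketch of the conditional-locking accessors defined below: in NTS
// mode there is a single PHP thread, so the mutex is skipped entirely; in ZTS mode
// (FrankenPHP) every getter and setter takes the instance lock. The type, field and
// method names here are hypothetical stand-ins, not the extension's real API.
package main

import (
	"fmt"
	"sync"
)

type state struct {
	mu    sync.Mutex
	isZTS bool
	token string
}

func (s *state) SetToken(t string) {
	if s.isZTS { // lock only when multiple PHP threads can touch this instance
		s.mu.Lock()
		defer s.mu.Unlock()
	}
	s.token = t
}

func (s *state) Token() string {
	if s.isZTS {
		s.mu.Lock()
		defer s.mu.Unlock()
	}
	return s.token
}

func main() {
	s := &state{isZTS: true}
	s.SetToken("AIK-sample-token")
	fmt.Println(s.Token())
}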
func NewRequestProcessorInstance(isZTS bool) *RequestProcessorInstance { return &RequestProcessorInstance{ CurrentToken: "", @@ -32,8 +31,6 @@ func NewRequestProcessorInstance(isZTS bool) *RequestProcessorInstance { } } -// SetCurrentServer updates the current server for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) SetCurrentServer(server *ServerData) { if i.isZTS { i.mu.Lock() @@ -42,8 +39,6 @@ func (i *RequestProcessorInstance) SetCurrentServer(server *ServerData) { i.CurrentServer = server } -// GetCurrentServer returns the current server for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) GetCurrentServer() *ServerData { if i.isZTS { i.mu.Lock() @@ -52,8 +47,6 @@ func (i *RequestProcessorInstance) GetCurrentServer() *ServerData { return i.CurrentServer } -// SetCurrentToken updates the current token for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) SetCurrentToken(token string) { if i.isZTS { i.mu.Lock() @@ -62,8 +55,6 @@ func (i *RequestProcessorInstance) SetCurrentToken(token string) { i.CurrentToken = token } -// GetCurrentToken returns the current token for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) GetCurrentToken() string { if i.isZTS { i.mu.Lock() @@ -72,8 +63,6 @@ func (i *RequestProcessorInstance) GetCurrentToken() string { return i.CurrentToken } -// SetRequestContext updates the request context for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) SetRequestContext(ctx context.RequestContextData) { if i.isZTS { i.mu.Lock() @@ -82,8 +71,6 @@ func (i *RequestProcessorInstance) SetRequestContext(ctx context.RequestContextD i.RequestContext = ctx } -// GetRequestContext returns the request context for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) GetRequestContext() *context.RequestContextData { if i.isZTS { i.mu.Lock() @@ -92,7 +79,6 @@ func (i *RequestProcessorInstance) GetRequestContext() *context.RequestContextDa return &i.RequestContext } -// IsInitialized checks if this instance has been initialized func (i *RequestProcessorInstance) IsInitialized() bool { if i.isZTS { i.mu.Lock() @@ -101,13 +87,10 @@ func (i *RequestProcessorInstance) IsInitialized() bool { return i.CurrentServer != nil } -// IsZTS returns whether this instance is running in ZTS mode func (i *RequestProcessorInstance) IsZTS() bool { return i.isZTS } -// SetContextCallback stores the context callback for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) SetContextCallback(callback unsafe.Pointer) { if i.isZTS { i.mu.Lock() @@ -116,8 +99,6 @@ func (i *RequestProcessorInstance) SetContextCallback(callback unsafe.Pointer) { i.ContextCallback = callback } -// GetContextCallback returns the context callback for this instance -// Conditional locking: only locks if ZTS mode func (i *RequestProcessorInstance) GetContextCallback() unsafe.Pointer { if i.isZTS { i.mu.Lock() From e3383b36a3ef99fde658c9879023acce3acd92ec Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 19 Nov 2025 10:54:04 +0000 Subject: [PATCH 013/170] Revert to using CallPhpFunctionWithOneParam --- lib/php-extension/Action.cpp | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index e84d0c070..34bd795c5 100644 
--- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -5,8 +5,8 @@ ACTION_STATUS Action::executeThrow(json &event) { int _code = event["code"].get(); std::string _message = event["message"].get(); - SG(sapi_headers).http_response_code = _code; zend_throw_exception(zend_exception_get_default(), _message.c_str(), _code); + CallPhpFunctionWithOneParam("http_response_code", _code); return BLOCK; } @@ -16,14 +16,8 @@ ACTION_STATUS Action::executeExit(json &event) { // CallPhpFunction("ob_clean"); CallPhpFunction("header_remove"); - SG(sapi_headers).http_response_code = _response_code; - - sapi_header_line ctr = {0}; - ctr.line = CONTENT_TYPE_HEADER; - ctr.line_len = sizeof(CONTENT_TYPE_HEADER) - 1; - ctr.response_code = 0; - sapi_header_op(SAPI_HEADER_REPLACE, &ctr); - + CallPhpFunctionWithOneParam("http_response_code", _response_code); + CallPhpFunctionWithOneParam("header", "Content-Type: text/plain"); CallPhpEcho(_message); CallPhpExit(); return EXIT; From 03ce80b5847db11b45957447d7e87ab4c2a3fa0b Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 19 Nov 2025 11:07:42 +0000 Subject: [PATCH 014/170] Remove unused CONTENT_TYPE_HEADER definition from Action.cpp --- lib/php-extension/Action.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index 34bd795c5..0771a2724 100644 --- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -1,7 +1,5 @@ #include "Includes.h" -#define CONTENT_TYPE_HEADER "Content-Type: text/plain" - ACTION_STATUS Action::executeThrow(json &event) { int _code = event["code"].get(); std::string _message = event["message"].get(); From 11c2f569a76967dcb8d6a3f682b7c673b1b724c8 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 19 Nov 2025 11:39:33 +0000 Subject: [PATCH 015/170] Refactor event handler mappings in RequestProcessor. 
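The handler table now maps each event ID directly to a function that takes *instance.RequestProcessorInstance, which removes the per-entry closures and the interface{} type assertions. A rough, self-contained sketch of that dispatch shape follows; the event keys and handler names are illustrative stand-ins, not the real C.EVENT_* constants or handlers.

package main

import "fmt"

type Instance struct{ Token string }

// HandlerFunction mirrors the typed signature introduced by this patch.
type HandlerFunction func(*Instance) string

func onPreRequest(i *Instance) string  { return "pre-request for " + i.Token }
func onPostRequest(i *Instance) string { return "post-request for " + i.Token }

// The integer keys stand in for the C.EVENT_* constants.
var eventHandlers = map[int]HandlerFunction{
	0: onPreRequest,
	1: onPostRequest,
}

func handleEvent(id int, i *Instance) string {
	if h, ok := eventHandlers[id]; ok {
		return h(i)
	}
	return ""
}

func main() {
	fmt.Println(handleEvent(0, &Instance{Token: "AIK-sample-token"}))
}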
--- lib/request-processor/aikido_types/handle.go | 2 - lib/request-processor/main.go | 46 ++++++-------------- 2 files changed, 13 insertions(+), 35 deletions(-) diff --git a/lib/request-processor/aikido_types/handle.go b/lib/request-processor/aikido_types/handle.go index 7eb8fac19..d2cfbb4b6 100644 --- a/lib/request-processor/aikido_types/handle.go +++ b/lib/request-processor/aikido_types/handle.go @@ -2,8 +2,6 @@ package aikido_types import "main/ipc/protos" -type HandlerFunction func(interface{}) string - type Method struct { ClassName string MethodName string diff --git a/lib/request-processor/main.go b/lib/request-processor/main.go index 47e3bb40b..dc04a8118 100644 --- a/lib/request-processor/main.go +++ b/lib/request-processor/main.go @@ -17,40 +17,20 @@ import ( "unsafe" ) +type HandlerFunction func(*instance.RequestProcessorInstance) string + var eventHandlers = map[int]HandlerFunction{ - C.EVENT_PRE_REQUEST: func(i interface{}) string { - return OnPreRequest(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_POST_REQUEST: func(i interface{}) string { - return OnPostRequest(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_SET_USER: func(i interface{}) string { - return OnUserEvent(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_SET_RATE_LIMIT_GROUP: func(i interface{}) string { - return OnRateLimitGroupEvent(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_GET_AUTO_BLOCKING_STATUS: func(i interface{}) string { - return OnGetAutoBlockingStatus(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_GET_BLOCKING_STATUS: func(i interface{}) string { - return OnGetBlockingStatus(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_PRE_OUTGOING_REQUEST: func(i interface{}) string { - return OnPreOutgoingRequest(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_POST_OUTGOING_REQUEST: func(i interface{}) string { - return OnPostOutgoingRequest(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_PRE_SHELL_EXECUTED: func(i interface{}) string { - return OnPreShellExecuted(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_PRE_PATH_ACCESSED: func(i interface{}) string { - return OnPrePathAccessed(i.(*instance.RequestProcessorInstance)) - }, - C.EVENT_PRE_SQL_QUERY_EXECUTED: func(i interface{}) string { - return OnPreSqlQueryExecuted(i.(*instance.RequestProcessorInstance)) - }, + C.EVENT_PRE_REQUEST: OnPreRequest, + C.EVENT_POST_REQUEST: OnPostRequest, + C.EVENT_SET_USER: OnUserEvent, + C.EVENT_SET_RATE_LIMIT_GROUP: OnRateLimitGroupEvent, + C.EVENT_GET_AUTO_BLOCKING_STATUS: OnGetAutoBlockingStatus, + C.EVENT_GET_BLOCKING_STATUS: OnGetBlockingStatus, + C.EVENT_PRE_OUTGOING_REQUEST: OnPreOutgoingRequest, + C.EVENT_POST_OUTGOING_REQUEST: OnPostOutgoingRequest, + C.EVENT_PRE_SHELL_EXECUTED: OnPreShellExecuted, + C.EVENT_PRE_PATH_ACCESSED: OnPrePathAccessed, + C.EVENT_PRE_SQL_QUERY_EXECUTED: OnPreSqlQueryExecuted, } func initializeServer(server *ServerData) { From 55c1666a2feec02cef9a47f90f3243181b142774 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 19 Nov 2025 17:38:05 +0200 Subject: [PATCH 016/170] Add GitHub workflows for building PHP test images and extensions in ZTS mode. The ZTS tests mirrors NTS tests(without apache-mod-php). 
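Both image flavours gate on the same sanity check: the freshly built php binary must report thread safety before the image is published or tests run. The workflows do this with a grep in a RUN step; the standalone sketch below shows an equivalent check and assumes only that a php binary is on PATH.

package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

func main() {
	out, err := exec.Command("php", "-i").CombinedOutput()
	if err != nil {
		fmt.Fprintln(os.Stderr, "failed to run php -i:", err)
		os.Exit(1)
	}
	// phpinfo() prints "Thread Safety => enabled" only for ZTS builds.
	if strings.Contains(string(out), "Thread Safety => enabled") {
		fmt.Println("ZTS build confirmed")
		return
	}
	fmt.Fprintln(os.Stderr, "ERROR: ZTS not enabled!")
	os.Exit(1)
}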
--- .../workflows/Dockerfile.build-extension-zts | 78 +++++++++ .../workflows/Dockerfile.centos-php-test-zts | 91 ++++++++++ .../workflows/Dockerfile.ubuntu-php-test-zts | 126 ++++++++++++++ .../build-centos-php-test-images-zts.yml | 95 +++++++++++ .../workflows/build-extension-images-zts.yml | 114 +++++++++++++ .../build-ubuntu-php-test-images-zts.yml | 90 ++++++++++ .github/workflows/build.yml | 157 ++++++++++++++++++ 7 files changed, 751 insertions(+) create mode 100644 .github/workflows/Dockerfile.build-extension-zts create mode 100644 .github/workflows/Dockerfile.centos-php-test-zts create mode 100644 .github/workflows/Dockerfile.ubuntu-php-test-zts create mode 100644 .github/workflows/build-centos-php-test-images-zts.yml create mode 100644 .github/workflows/build-extension-images-zts.yml create mode 100644 .github/workflows/build-ubuntu-php-test-images-zts.yml diff --git a/.github/workflows/Dockerfile.build-extension-zts b/.github/workflows/Dockerfile.build-extension-zts new file mode 100644 index 000000000..da8799f10 --- /dev/null +++ b/.github/workflows/Dockerfile.build-extension-zts @@ -0,0 +1,78 @@ +# syntax=docker/dockerfile:1.7 + +ARG BASE_IMAGE=ubuntu:20.04 +ARG PHP_VERSION=8.3 +ARG PHP_SRC_REF=PHP-${PHP_VERSION} + +FROM ${BASE_IMAGE} AS base +SHELL ["/bin/bash", "-eo", "pipefail", "-c"] + +ENV DEBIAN_FRONTEND=noninteractive \ + TZ=Etc/UTC \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + LANGUAGE=C.UTF-8 + +RUN apt-get update \ + && apt-get install -y --no-install-recommends tzdata ca-certificates git wget curl xz-utils \ + && ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime \ + && echo "${TZ}" > /etc/timezone \ + && dpkg-reconfigure -f noninteractive tzdata \ + && update-ca-certificates \ + && git config --global http.sslCAInfo /etc/ssl/certs/ca-certificates.crt \ + && git config --global http.sslVerify true \ + && rm -rf /var/lib/apt/lists/* + +# Builder: toolchain + dev libs +FROM base AS build-deps +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential autoconf bison re2c pkg-config \ + libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ + libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ + libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + wget tar \ + && rm -rf /var/lib/apt/lists/* + + +# Fetch php-src +FROM build-deps AS php-src +ARG PHP_SRC_REF +WORKDIR /usr/src +RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git +WORKDIR /usr/src/php-src +RUN ./buildconf --force + + +# Build PHP with ZTS enabled +FROM php-src AS php-build +# Configure flags with --enable-zts for thread safety +RUN ./configure \ + --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ + --enable-zts \ + --enable-mbstring \ + --enable-pcntl \ + --enable-intl \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ + && make -j"$(nproc)" \ + && make install \ + && strip /usr/local/bin/php || true + + +FROM build-deps AS dev +COPY --from=php-build /usr/local /usr/local +# Sanity check: verify ZTS is enabled and required extensions are available +RUN php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ + php -m | grep -E 'curl|mysqli' >/dev/null +ENV PATH="/usr/local/bin:${PATH}" + +RUN mkdir -p /usr/local/etc/php/conf.d +WORKDIR /work +CMD ["php", "-v"] + + diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts new file mode 100644 index 000000000..e03c6ed65 --- /dev/null +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -0,0 +1,91 @@ +# syntax=docker/dockerfile:1.7 +# CentOS Stream 9 test image with PHP built from source in ZTS mode +# Used for testing the extension with FrankenPHP and other ZTS environments + +ARG BASE_IMAGE=quay.io/centos/centos:stream9 +ARG PHP_VERSION=8.3 +ARG PHP_SRC_REF=PHP-${PHP_VERSION} + +FROM ${BASE_IMAGE} AS base +SHELL ["/bin/bash", "-euo", "pipefail", "-c"] + +ENV TZ=Etc/UTC \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + PHP_VERSION=${PHP_VERSION} + +RUN yum install -y yum-utils && \ + dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true + +# Install minimal tools needed for re2c build (replace curl-minimal with full curl) +RUN yum install -y xz tar gcc gcc-c++ make + +ENV RE2C_VERSION=3.1 +RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ + && mkdir -p /tmp/re2c-src \ + && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ + && cd /tmp/re2c-src \ + && ./configure \ + && make -j"$(nproc)" \ + && make install \ + && rm -rf /tmp/re2c-src /tmp/re2c.tar.xz + +# Install remaining build dependencies and tools +RUN yum install -y autoconf bison pkgconfig \ + libxml2-devel sqlite-devel libcurl-devel openssl-devel \ + libzip-devel oniguruma-devel libjpeg-turbo-devel libpng-devel libwebp-devel \ + libicu-devel readline-devel libxslt-devel \ + git wget \ + python3 python3-devel python3-pip \ + nginx httpd procps-ng \ + && yum clean all + +# Install mariadb-devel separately (may need different repo or skip if not critical) +RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" + +# Fetch and build PHP from source with ZTS +FROM base AS php-build +ARG PHP_SRC_REF +WORKDIR /usr/src +RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git +WORKDIR /usr/src/php-src +RUN ./buildconf --force + +# Build PHP with ZTS enabled +RUN ./configure \ + --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ + --enable-zts \ + --enable-fpm \ + --enable-mbstring \ + --enable-pcntl \ + --enable-intl \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ +&& make -j"$(nproc)" \ +&& make install \ +&& strip /usr/local/bin/php /usr/local/sbin/php-fpm || true + +# Final image with PHP and test infrastructure +FROM base AS final +COPY --from=php-build /usr/local /usr/local + +# Verify ZTS is enabled +RUN php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ + php -m | grep -E 'curl|mysqli' >/dev/null + +ENV PATH="/usr/local/bin:${PATH}" + +# Python deps used by test harness +RUN python3 -m pip install --no-cache-dir --upgrade pip && \ + python3 -m pip install --no-cache-dir flask requests psutil + +# Quality-of-life +WORKDIR /work +CMD ["bash"] + + diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts new file mode 100644 index 000000000..9dcd47c9e --- /dev/null +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -0,0 +1,126 @@ +# syntax=docker/dockerfile:1.7 +# Ubuntu test image with PHP built from source in ZTS mode +# Used for testing the extension with FrankenPHP and other ZTS environments + +ARG DEBIAN_FRONTEND=noninteractive +ARG PHP_VERSION=8.3 +ARG PHP_SRC_REF=PHP-${PHP_VERSION} + +FROM ubuntu:24.04 AS base +SHELL ["/bin/bash", "-eo", "pipefail", "-c"] + +ENV DEBIAN_FRONTEND=noninteractive \ + TZ=Etc/UTC \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + LANGUAGE=C.UTF-8 \ + PHP_VERSION=${PHP_VERSION} + +# Install base dependencies and build tools +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + ca-certificates curl gnupg lsb-release tzdata locales \ + software-properties-common apt-transport-https \ + git make unzip xz-utils \ + build-essential autoconf bison re2c pkg-config \ + libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ + libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ + libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + && rm -rf /var/lib/apt/lists/* + +# Timezone to UTC +RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ + echo "${TZ}" > /etc/timezone && \ + dpkg-reconfigure -f noninteractive tzdata + +# Fetch and build PHP from source with ZTS +FROM base AS php-build +ARG PHP_SRC_REF +WORKDIR /usr/src +RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git +WORKDIR /usr/src/php-src +RUN ./buildconf --force + +# Build PHP with ZTS enabled +RUN ./configure \ + --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ + --enable-zts \ + --enable-fpm \ + --enable-mbstring \ + --enable-pcntl \ + --enable-intl \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ +&& make -j"$(nproc)" \ +&& make install \ +&& strip /usr/local/bin/php /usr/local/sbin/php-fpm || true + +# Final image with PHP and test infrastructure +FROM base AS final +COPY --from=php-build /usr/local /usr/local + +# Verify ZTS is enabled +RUN php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ + php -m | grep -E 'curl|mysqli' >/dev/null + +ENV PATH="/usr/local/bin:${PATH}" + +# Install web servers and database (without PHP packages) +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + nginx \ + apache2 \ + mariadb-server \ + apache2-bin \ + && rm -rf /var/lib/apt/lists/* + +# Apache: switch to prefork for mod_php scenario and enable rewrite +RUN a2dismod mpm_event || true && \ + a2dismod mpm_worker || true && \ + a2enmod mpm_prefork rewrite || true + +# ---- Python toolchain used by tests ---- +ENV PIP_DISABLE_PIP_VERSION_CHECK=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + VIRTUAL_ENV=/opt/ci-venv \ + PATH="/opt/ci-venv/bin:${PATH}" + +RUN apt-get update && apt-get install -y --no-install-recommends \ + python3 python3-venv python3-pip python3-dev \ + && python3 -m venv "$VIRTUAL_ENV" \ + && "$VIRTUAL_ENV/bin/pip" install --no-cache-dir \ + flask pandas psutil requests \ + && apt-get clean && rm -rf /var/lib/apt/lists/* + +# Helper: start MariaDB +RUN mkdir -p /usr/local/bin /var/lib/mysql /run/mysqld && \ + printf '%s\n' '#!/usr/bin/env bash' \ + 'set -euo pipefail' \ + 'mkdir -p /var/lib/mysql /run/mysqld' \ + 'chown -R mysql:mysql /var/lib/mysql /run/mysqld' \ + 'if [ ! -d /var/lib/mysql/mysql ]; then' \ + ' mysqld --initialize-insecure --user=mysql --datadir=/var/lib/mysql' \ + 'fi' \ + 'mysqld --user=mysql --datadir=/var/lib/mysql &' \ + 'pid=$!' \ + 'for i in {1..30}; do mysqladmin ping --silent && break; sleep 1; done' \ + 'mysql -u root -e "CREATE DATABASE IF NOT EXISTS db;" || true' \ + 'mysql -u root -e "ALTER USER '\''root'\''@'\''localhost'\'' IDENTIFIED BY '\''pwd'\''; FLUSH PRIVILEGES;" || true' \ + 'wait $pid' \ + > /usr/local/bin/start-mariadb && \ + chmod +x /usr/local/bin/start-mariadb + +# Create PHP-CGI symlink for CGI tests (using source-built PHP) +RUN mkdir -p /usr/lib/cgi-bin && \ + ln -sf /usr/local/bin/php-cgi /usr/lib/cgi-bin/php-cgi || \ + (echo "Note: php-cgi may not be available in source build" && true) + +WORKDIR /work +CMD ["bash"] + + diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml new file mode 100644 index 000000000..b81295a44 --- /dev/null +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -0,0 +1,95 @@ +name: Build CentOS PHP test images (ZTS) + +on: + workflow_dispatch: + push: + paths: + - .github/workflows/Dockerfile.centos-php-test-zts + - .github/workflows/build-centos-php-test-images-zts.yml + +env: + REGISTRY: ghcr.io + IMAGE_NAME: aikidosec/firewall-php-test-centos-zts + VERSION: v1 + +jobs: + build-amd64: + runs-on: ubuntu-24.04 + strategy: + matrix: + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + fail-fast: false + permissions: { contents: read, packages: write } + steps: + - uses: actions/checkout@v4 + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build & push (amd64) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: .github/workflows/Dockerfile.centos-php-test-zts + platforms: linux/amd64 + push: true + build-args: | + PHP_VERSION=${{ matrix.php_version }} + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + + build-arm64: + runs-on: ubuntu-24.04-arm + strategy: + matrix: + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + fail-fast: false + permissions: { contents: read, packages: write } + steps: + - uses: actions/checkout@v4 + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build & push (arm64) + uses: docker/build-push-action@v6 + with: + context: . + file: .github/workflows/Dockerfile.centos-php-test-zts + platforms: linux/arm64 + push: true + build-args: | + PHP_VERSION=${{ matrix.php_version }} + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max + + publish-manifests: + runs-on: ubuntu-24.04 + needs: [build-amd64, build-arm64] + strategy: + matrix: + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + fail-fast: false + permissions: { contents: read, packages: write } + steps: + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Create multi-arch manifest + run: | + IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + V=${{ matrix.php_version }} + docker buildx imagetools create \ + --tag ${IMAGE}:${V}-${{ env.VERSION }} \ + ${IMAGE}:${V}-amd64-${{ env.VERSION }} \ + ${IMAGE}:${V}-arm64-${{ env.VERSION }} + diff --git a/.github/workflows/build-extension-images-zts.yml b/.github/workflows/build-extension-images-zts.yml new file mode 100644 index 000000000..d5526fafc --- /dev/null +++ b/.github/workflows/build-extension-images-zts.yml @@ -0,0 +1,114 @@ +name: Create images for building extension (ZTS) + +on: + workflow_dispatch: + push: + paths: + - .github/workflows/Dockerfile.build-extension-zts + - .github/workflows/build-extension-images-zts.yml + +env: + REGISTRY: ghcr.io + IMAGE_NAME: aikidosec/firewall-php-build-extension-zts + VERSION: v1 + +jobs: + build-amd64: + runs-on: ubuntu-24.04 + strategy: + fail-fast: false + matrix: + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v4 + + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & push (amd64) + uses: docker/build-push-action@v6 + with: + context: . 
+ file: .github/workflows/Dockerfile.build-extension-zts + target: dev + platforms: linux/amd64 + push: true + build-args: | + PHP_VERSION=${{ matrix.php_version }} + PHP_SRC_REF=PHP-${{ matrix.php_version }} + tags: | + ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + + build-arm64: + runs-on: ubuntu-24.04-arm + strategy: + fail-fast: false + matrix: + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v4 + + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build & push (arm64) + uses: docker/build-push-action@v6 + with: + context: . + file: .github/workflows/Dockerfile.build-extension-zts + target: dev + platforms: linux/arm64 + push: true + build-args: | + PHP_VERSION=${{ matrix.php_version }} + PHP_SRC_REF=PHP-${{ matrix.php_version }} + tags: | + ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max + + + # ---- stitch images into multi-arch manifests ---- + publish-manifests: + runs-on: ubuntu-24.04 + needs: [build-amd64, build-arm64] + strategy: + fail-fast: false + matrix: + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + permissions: + contents: read + packages: write + steps: + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Create multi-arch manifest + run: | + IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + V=${{ matrix.php_version }} + docker buildx imagetools create \ + --tag ${IMAGE}:${V}-${{ env.VERSION }} \ + ${IMAGE}:${V}-amd64-${{ env.VERSION }} \ + ${IMAGE}:${V}-arm64-${{ env.VERSION }} + diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml new file mode 100644 index 000000000..c2dbd1fa6 --- /dev/null +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -0,0 +1,90 @@ +name: Build Ubuntu PHP test images (ZTS) + +on: + workflow_dispatch: + push: + paths: + - .github/workflows/Dockerfile.ubuntu-php-test-zts + - .github/workflows/build-ubuntu-php-test-images-zts.yml + +env: + REGISTRY: ghcr.io + IMAGE_NAME: aikidosec/firewall-php-test-ubuntu-zts + VERSION: v1 + +jobs: + build-amd64: + runs-on: ubuntu-24.04 + strategy: + matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] } + fail-fast: false + permissions: { contents: read, packages: write } + steps: + - uses: actions/checkout@v4 + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - uses: docker/build-push-action@v6 + with: + context: . 
+ file: .github/workflows/Dockerfile.ubuntu-php-test-zts + platforms: linux/amd64 + push: true + build-args: | + PHP_VERSION=${{ matrix.php_version }} + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + + build-arm64: + runs-on: ubuntu-24.04-arm + strategy: + matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] } + fail-fast: false + permissions: { contents: read, packages: write } + steps: + - uses: actions/checkout@v4 + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - uses: docker/build-push-action@v6 + with: + context: . + file: .github/workflows/Dockerfile.ubuntu-php-test-zts + platforms: linux/arm64 + push: true + build-args: | + PHP_VERSION=${{ matrix.php_version }} + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max + + publish-manifests: + runs-on: ubuntu-24.04 + needs: [build-amd64, build-arm64] + strategy: + matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] } + fail-fast: false + permissions: { contents: read, packages: write } + steps: + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Create multi-arch manifest + run: | + IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + V=${{ matrix.php_version }} + docker buildx imagetools create \ + --tag ${IMAGE}:${V}-${{ env.VERSION }} \ + ${IMAGE}:${V}-amd64-${{ env.VERSION }} \ + ${IMAGE}:${V}-arm64-${{ env.VERSION }} + diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 98dc4b233..87dca3a21 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -449,6 +449,163 @@ jobs: cd tools python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 + test_php_ubuntu_zts: + name: Ubuntu ZTS php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} + runs-on: ubuntu-24.04${{ matrix.arch }} + container: + image: ghcr.io/aikidosec/firewall-php-test-ubuntu-zts:${{ matrix.php_version }}-v1 + options: --privileged + needs: [ build_deb ] + strategy: + matrix: + arch: ['', '-arm'] + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + server: ['nginx-php-fpm', 'php-built-in'] + fail-fast: false + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Get Arch + run: echo "ARCH=$(uname -m)" >> $GITHUB_ENV + + - name: Set env + run: | + AIKIDO_VERSION=$(grep '#define PHP_AIKIDO_VERSION' lib/php-extension/include/php_aikido.h | awk -F'"' '{print $2}') + echo $AIKIDO_VERSION + echo "AIKIDO_VERSION=$AIKIDO_VERSION" >> $GITHUB_ENV + echo "AIKIDO_DEB=aikido-php-firewall.${{ env.ARCH }}.deb" >> $GITHUB_ENV + + - name: Verify ZTS is enabled + run: | + php -v | grep 
-q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) + php -v + + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + pattern: | + ${{ env.AIKIDO_DEB }} + + - name: Prepare php-fpm + if: matrix.server == 'nginx-php-fpm' + run: | + # Verify ZTS-built PHP-FPM exists and is ZTS-enabled + /usr/local/sbin/php-fpm -v | grep -q "(ZTS)" || (echo "ERROR: PHP-FPM not built with ZTS!" && exit 1) + /usr/local/sbin/php-fpm -v + # Create symlink for nginx to find php-fpm + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + # Verify symlink works + php-fpm -v | grep -q "(ZTS)" || (echo "ERROR: php-fpm symlink not working!" && exit 1) + + - name: Start MariaDB (background) + run: | + start-mariadb & # provided by the image + sleep 5 + mysql -u root -ppwd -e "SELECT 1" || (echo "MySQL not up" && exit 1) + + - name: Install DEB + run: | + dpkg -i -E ${{ env.AIKIDO_DEB }}/${{ env.AIKIDO_DEB }} + + - name: Run CLI tests + run: | + php lib/php-extension/run-tests.php ./tests/cli + + - name: Run ${{ matrix.server }} server tests + run: | + cd tools + python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 + + test_php_centos_zts: + name: CentOS ZTS php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} + runs-on: ubuntu-24.04${{ matrix.arch }} + container: + image: ghcr.io/aikidosec/firewall-php-test-centos-zts:${{ matrix.php_version }}-v1 + options: --privileged + needs: [ build_rpm ] + strategy: + matrix: + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + server: ['nginx-php-fpm', 'php-built-in'] + arch: ['', '-arm'] + fail-fast: false + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup + run: | + uname -a + cat /etc/centos-release || cat /etc/redhat-release || echo "CentOS/Stream detected" + php -v + nginx -v || true + which php-fpm && php-fpm -v || true + + - name: Verify ZTS is enabled + run: | + php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) + php -v + + - name: Install and start MySQL + run: | + mkdir -p /var/lib/mysql + mysqld --initialize-insecure --datadir=/var/lib/mysql + mysqld -u root --datadir=/var/lib/mysql --socket=/var/lib/mysql/mysql.sock & + sleep 10 + mysql -u root -e "CREATE DATABASE IF NOT EXISTS db;" + mysql -u root -e "ALTER USER 'root'@'localhost' IDENTIFIED BY 'pwd'; FLUSH PRIVILEGES;" + + - name: Test MySQL connection with mysqli + run: | + php -r ' + $mysqli = new mysqli("localhost", "root", "pwd", "db"); + if ($mysqli->connect_error) { + echo "MySQL connection failed: " . $mysqli->connect_error . 
"\n"; + exit(1); + } else { + echo "MySQL connection successful\n"; + $mysqli->close(); + } + ' + + - name: Get Arch + run: echo "ARCH=$(uname -m)" >> $GITHUB_ENV + + - name: Check PHP setup + run: | + uname -m + php -v + php -i | head -20 + + - name: Get Aikido version + run: | + AIKIDO_VERSION=$(grep '#define PHP_AIKIDO_VERSION' lib/php-extension/include/php_aikido.h | awk -F'"' '{print $2}') + echo $AIKIDO_VERSION + echo "AIKIDO_VERSION=$AIKIDO_VERSION" >> $GITHUB_ENV + echo "AIKIDO_RPM=aikido-php-firewall.${{ env.ARCH }}.rpm" >> $GITHUB_ENV + + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + pattern: | + ${{ env.AIKIDO_RPM }} + + - name: Install RPM + run: | + rpm -Uvh --oldpackage ${{ env.AIKIDO_RPM }}/${{ env.AIKIDO_RPM }} + + - name: Run CLI tests + run: | + export TEST_PHP_EXECUTABLE=/usr/local/bin/php + php lib/php-extension/run-tests.php ./tests/cli + + - name: Run ${{ matrix.server }} server tests + run: | + cd tools + python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 + test_php_qa_action_controlling_tests_apache_mod_php: name: QA apache-mod-php runs-on: ubuntu-latest From 454662857e804c4e98c7bbdf3ac5ea27ad67dcc7 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 12:50:02 +0200 Subject: [PATCH 017/170] Enable maintainer ZTS option in Dockerfiles for PHP builds --- .github/workflows/Dockerfile.build-extension-zts | 7 ++++--- .github/workflows/Dockerfile.centos-php-test-zts | 1 + .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 + 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/Dockerfile.build-extension-zts b/.github/workflows/Dockerfile.build-extension-zts index da8799f10..4d30959bb 100644 --- a/.github/workflows/Dockerfile.build-extension-zts +++ b/.github/workflows/Dockerfile.build-extension-zts @@ -51,6 +51,7 @@ RUN ./configure \ --with-config-file-path=/usr/local/lib \ --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-zts \ + --enable-maintainer-zts \ --enable-mbstring \ --enable-pcntl \ --enable-intl \ @@ -59,9 +60,9 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ - && make -j"$(nproc)" \ - && make install \ - && strip /usr/local/bin/php || true +&& make -j"$(nproc)" \ +&& make install \ +&& strip /usr/local/bin/php || true FROM build-deps AS dev diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index e03c6ed65..479d77880 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -57,6 +57,7 @@ RUN ./configure \ --with-config-file-path=/usr/local/lib \ --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-zts \ + --enable-maintainer-zts \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 9dcd47c9e..97d1238f6 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -47,6 +47,7 @@ RUN ./configure \ --with-config-file-path=/usr/local/lib \ --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-zts \ + --enable-maintainer-zts \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ From 6045f60456a114c0f4581d63326e35201a56c464 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 13:15:00 +0200 Subject: [PATCH 018/170] Patch openssl.c for OpenSSL compatibility in Dockerfiles for PHP 
builds --- .github/workflows/Dockerfile.build-extension-zts | 14 ++++++++++++++ .github/workflows/Dockerfile.centos-php-test-zts | 15 +++++++++++++++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 15 +++++++++++++++ 3 files changed, 44 insertions(+) diff --git a/.github/workflows/Dockerfile.build-extension-zts b/.github/workflows/Dockerfile.build-extension-zts index 4d30959bb..60183077e 100644 --- a/.github/workflows/Dockerfile.build-extension-zts +++ b/.github/workflows/Dockerfile.build-extension-zts @@ -42,6 +42,20 @@ RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src WORKDIR /usr/src/php-src RUN ./buildconf --force +# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) +RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ + awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ + indent = $0; \ + gsub(/[^[:space:]].*/, "", indent); \ + print indent "#ifdef RSA_SSLV23_PADDING"; \ + gsub(/^[[:space:]]*/, indent " "); \ + print; \ + print indent "#endif"; \ + next \ + } \ + { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ + mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ + fi || true # Build PHP with ZTS enabled FROM php-src AS php-build diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 479d77880..a335b1306 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -51,6 +51,21 @@ RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src WORKDIR /usr/src/php-src RUN ./buildconf --force +# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) +RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ + awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ + indent = $0; \ + gsub(/[^[:space:]].*/, "", indent); \ + print indent "#ifdef RSA_SSLV23_PADDING"; \ + gsub(/^[[:space:]]*/, indent " "); \ + print; \ + print indent "#endif"; \ + next \ + } \ + { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ + mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ + fi || true + # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 97d1238f6..9a760319b 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -41,6 +41,21 @@ RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src WORKDIR /usr/src/php-src RUN ./buildconf --force +# Patch openssl.c for OpenSSL compatibility +RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ + awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ + indent = $0; \ + gsub(/[^[:space:]].*/, "", indent); \ + print indent "#ifdef RSA_SSLV23_PADDING"; \ + gsub(/^[[:space:]]*/, indent " "); \ + print; \ + print indent "#endif"; \ + next \ + } \ + { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ + mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ + fi || true + # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ From 04239991be84eeb75b655c3fbad37be12de4e141 Mon Sep 17 00:00:00 
2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 13:29:03 +0200 Subject: [PATCH 019/170] Debug php -v --- .github/workflows/Dockerfile.build-extension-zts | 5 ++++- .github/workflows/Dockerfile.centos-php-test-zts | 5 ++++- .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 ++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/Dockerfile.build-extension-zts b/.github/workflows/Dockerfile.build-extension-zts index 60183077e..9160b3fbd 100644 --- a/.github/workflows/Dockerfile.build-extension-zts +++ b/.github/workflows/Dockerfile.build-extension-zts @@ -82,7 +82,10 @@ RUN ./configure \ FROM build-deps AS dev COPY --from=php-build /usr/local /usr/local # Sanity check: verify ZTS is enabled and required extensions are available -RUN php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ +RUN echo "=== DEBUG: php -v output ===" && \ + php -v && \ + echo "=== END DEBUG ===" && \ + php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index a335b1306..383a72e88 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -91,7 +91,10 @@ FROM base AS final COPY --from=php-build /usr/local /usr/local # Verify ZTS is enabled -RUN php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ +RUN echo "=== DEBUG: php -v output ===" && \ + php -v && \ + echo "=== END DEBUG ===" && \ + php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 9a760319b..ab88e469a 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -81,7 +81,10 @@ FROM base AS final COPY --from=php-build /usr/local /usr/local # Verify ZTS is enabled -RUN php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ +RUN echo "=== DEBUG: php -v output ===" && \ + php -v && \ + echo "=== END DEBUG ===" && \ + php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" From 815e1846116db80bf587175de7d7dd352a4ded65 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 13:51:10 +0200 Subject: [PATCH 020/170] Update ZTS verification in workflows and remove unused intl extension from Dockerfiles --- .github/workflows/Dockerfile.build-extension-zts | 6 +----- .github/workflows/Dockerfile.centos-php-test-zts | 6 +----- .github/workflows/Dockerfile.ubuntu-php-test-zts | 6 +----- .github/workflows/build.yml | 8 ++++---- 4 files changed, 7 insertions(+), 19 deletions(-) diff --git a/.github/workflows/Dockerfile.build-extension-zts b/.github/workflows/Dockerfile.build-extension-zts index 9160b3fbd..3ee96178e 100644 --- a/.github/workflows/Dockerfile.build-extension-zts +++ b/.github/workflows/Dockerfile.build-extension-zts @@ -68,7 +68,6 @@ RUN ./configure \ --enable-maintainer-zts \ --enable-mbstring \ --enable-pcntl \ - --enable-intl \ --with-curl \ --with-mysqli \ --with-openssl \ @@ -82,10 +81,7 @@ RUN ./configure \ FROM build-deps AS dev COPY --from=php-build /usr/local /usr/local # Sanity check: verify ZTS is enabled and required extensions are available -RUN echo "=== DEBUG: php -v output ===" && \ - php -v && \ - echo "=== END DEBUG ===" && \ - php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ +RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 383a72e88..3f58c7bd3 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -76,7 +76,6 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ - --enable-intl \ --with-curl \ --with-mysqli \ --with-openssl \ @@ -91,10 +90,7 @@ FROM base AS final COPY --from=php-build /usr/local /usr/local # Verify ZTS is enabled -RUN echo "=== DEBUG: php -v output ===" && \ - php -v && \ - echo "=== END DEBUG ===" && \ - php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ +RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index ab88e469a..08c06488d 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -66,7 +66,6 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ - --enable-intl \ --with-curl \ --with-mysqli \ --with-openssl \ @@ -81,10 +80,7 @@ FROM base AS final COPY --from=php-build /usr/local /usr/local # Verify ZTS is enabled -RUN echo "=== DEBUG: php -v output ===" && \ - php -v && \ - echo "=== END DEBUG ===" && \ - php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ +RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 87dca3a21..60b18b37e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -478,7 +478,7 @@ jobs: - name: Verify ZTS is enabled run: | - php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) + php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) php -v - name: Download artifacts @@ -491,12 +491,12 @@ jobs: if: matrix.server == 'nginx-php-fpm' run: | # Verify ZTS-built PHP-FPM exists and is ZTS-enabled - /usr/local/sbin/php-fpm -v | grep -q "(ZTS)" || (echo "ERROR: PHP-FPM not built with ZTS!" && exit 1) + /usr/local/sbin/php-fpm -v | grep -q "ZTS" || (echo "ERROR: PHP-FPM not built with ZTS!" && exit 1) /usr/local/sbin/php-fpm -v # Create symlink for nginx to find php-fpm ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true # Verify symlink works - php-fpm -v | grep -q "(ZTS)" || (echo "ERROR: php-fpm symlink not working!" && exit 1) + php-fpm -v | grep -q "ZTS" || (echo "ERROR: php-fpm symlink not working!" && exit 1) - name: Start MariaDB (background) run: | @@ -545,7 +545,7 @@ jobs: - name: Verify ZTS is enabled run: | - php -v | grep -q "(ZTS)" || (echo "ERROR: ZTS not enabled!" && exit 1) + php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) php -v - name: Install and start MySQL From 2bcacfc27bbb33b64d7870ca94c2488b7f5d6e79 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 14:29:05 +0200 Subject: [PATCH 021/170] Add PHP-FPM configuration files to Dockerfiles and update ZTS verification in workflows --- .../workflows/Dockerfile.centos-php-test-zts | 40 ++++++++++++++++++- .../workflows/Dockerfile.ubuntu-php-test-zts | 40 ++++++++++++++++++- .github/workflows/build.yml | 4 +- 3 files changed, 80 insertions(+), 4 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 3f58c7bd3..634fa4280 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -37,7 +37,7 @@ RUN yum install -y autoconf bison pkgconfig \ libicu-devel readline-devel libxslt-devel \ git wget \ python3 python3-devel python3-pip \ - nginx httpd procps-ng \ + nginx httpd procps-ng mysql-server \ && yum clean all # Install mariadb-devel separately (may need different repo or skip if not critical) @@ -85,6 +85,44 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true +# Install PHP-FPM configuration files +RUN mkdir -p /usr/local/etc/php-fpm.d && \ + if [ -f sapi/fpm/php-fpm.conf ]; then \ + cp sapi/fpm/php-fpm.conf /usr/local/etc/php-fpm.conf.default || \ + cp sapi/fpm/php-fpm.conf.default /usr/local/etc/php-fpm.conf.default || true; \ + fi && \ + if [ -f sapi/fpm/www.conf ]; then \ + cp sapi/fpm/www.conf /usr/local/etc/php-fpm.d/www.conf.default || \ + cp sapi/fpm/www.conf.default /usr/local/etc/php-fpm.d/www.conf.default || true; \ + fi && \ + if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ + if [ -f /usr/local/etc/php-fpm.conf.default ]; then \ + cp /usr/local/etc/php-fpm.conf.default /usr/local/etc/php-fpm.conf; \ + else \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /var/run/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ + fi; \ + fi && \ + if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ + if [ -f /usr/local/etc/php-fpm.d/www.conf.default ]; then \ + cp /usr/local/etc/php-fpm.d/www.conf.default /usr/local/etc/php-fpm.d/www.conf; \ + else \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ + fi; \ + fi + # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 08c06488d..7c57de9d1 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -75,6 +75,44 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true +# Install PHP-FPM configuration files +RUN mkdir -p /usr/local/etc/php-fpm.d && \ + if [ -f sapi/fpm/php-fpm.conf ]; then \ + cp sapi/fpm/php-fpm.conf /usr/local/etc/php-fpm.conf.default || \ + cp sapi/fpm/php-fpm.conf.default /usr/local/etc/php-fpm.conf.default || true; \ + fi && \ + if [ -f sapi/fpm/www.conf ]; then \ + cp sapi/fpm/www.conf /usr/local/etc/php-fpm.d/www.conf.default || \ + cp sapi/fpm/www.conf.default /usr/local/etc/php-fpm.d/www.conf.default || true; \ + fi && \ + if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ + if [ -f /usr/local/etc/php-fpm.conf.default ]; then \ + cp /usr/local/etc/php-fpm.conf.default /usr/local/etc/php-fpm.conf; \ + else \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /var/run/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ + fi; \ + fi && \ + if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ + if [ -f /usr/local/etc/php-fpm.d/www.conf.default ]; then \ + cp /usr/local/etc/php-fpm.d/www.conf.default /usr/local/etc/php-fpm.d/www.conf; \ + else \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ + fi; \ + fi + # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local @@ -97,7 +135,7 @@ RUN apt-get update && \ # Apache: switch to prefork for mod_php scenario and enable rewrite RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ - a2enmod mpm_prefork rewrite || true + a2enmod mpm_prefork rewrite cgi cgid || true # ---- Python toolchain used by tests ---- ENV PIP_DISABLE_PIP_VERSION_CHECK=1 \ diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 60b18b37e..1bcb686e8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -491,12 +491,12 @@ jobs: if: matrix.server == 'nginx-php-fpm' run: | # Verify ZTS-built PHP-FPM exists and is ZTS-enabled - /usr/local/sbin/php-fpm -v | grep -q "ZTS" || (echo "ERROR: PHP-FPM not built with ZTS!" && exit 1) /usr/local/sbin/php-fpm -v + /usr/local/sbin/php-fpm -i | grep -q "Thread Safety => enabled" || (echo "ERROR: PHP-FPM not built with ZTS!" && exit 1) # Create symlink for nginx to find php-fpm ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true # Verify symlink works - php-fpm -v | grep -q "ZTS" || (echo "ERROR: php-fpm symlink not working!" && exit 1) + php-fpm -i | grep -q "Thread Safety => enabled" || (echo "ERROR: php-fpm symlink not working or not ZTS!" && exit 1) - name: Start MariaDB (background) run: | From 842fe5b91830fcc81ea9df39184a9c66afe4387f Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 14:53:40 +0200 Subject: [PATCH 022/170] Add MySQL socket configuration for mysqli in CentOS and Ubuntu Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-zts | 4 ++++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 634fa4280..6b467816d 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -133,6 +133,10 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ ENV PATH="/usr/local/bin:${PATH}" +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN mkdir -p /usr/local/etc/php/conf.d && \ + echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + # Python deps used by test harness RUN python3 -m pip install --no-cache-dir --upgrade pip && \ python3 -m pip install --no-cache-dir flask requests psutil diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 7c57de9d1..168fbbcc5 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -123,6 +123,10 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ ENV PATH="/usr/local/bin:${PATH}" +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN mkdir -p /usr/local/etc/php/conf.d && \ + echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + # Install web servers and database (without PHP packages) RUN apt-get update && \ apt-get install -y --no-install-recommends \ From 0ea1ef03811102d8d38759877bf745e27a830cf5 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 17:15:44 +0200 Subject: [PATCH 023/170] Add GitHub workflows for building PHP test images and extensions in NTS/ZTS --- ...tension => Dockerfile.build-extension-nts} | 0 .../workflows/Dockerfile.build-extension-zts | 2 + ...hp-test => Dockerfile.centos-php-test-nts} | 0 .../workflows/Dockerfile.centos-php-test-zts | 2 + ...hp-test => Dockerfile.ubuntu-php-test-nts} | 0 .../workflows/Dockerfile.ubuntu-php-test-zts | 2 + ...l => build-centos-php-test-images-nts.yml} | 12 +- ...ges.yml => build-extension-images-nts.yml} | 12 +- ...l => build-ubuntu-php-test-images-nts.yml} | 12 +- .github/workflows/build.yml | 103 +++++++++++++++--- package/rpm/aikido.spec | 20 +++- tools/build.sh | 14 ++- tools/rpm_build.sh | 9 +- tools/sample_apps_build.sh | 14 ++- 14 files changed, 160 insertions(+), 42 deletions(-) rename .github/workflows/{Dockerfile.build-extension => Dockerfile.build-extension-nts} (100%) rename .github/workflows/{Dockerfile.centos-php-test => Dockerfile.centos-php-test-nts} (100%) rename .github/workflows/{Dockerfile.ubuntu-php-test => Dockerfile.ubuntu-php-test-nts} (100%) rename .github/workflows/{build-centos-php-test-images.yml => build-centos-php-test-images-nts.yml} (90%) rename .github/workflows/{build-extension-images.yml => build-extension-images-nts.yml} (90%) rename .github/workflows/{build-ubuntu-php-test-images.yml => build-ubuntu-php-test-images-nts.yml} (90%) diff --git a/.github/workflows/Dockerfile.build-extension b/.github/workflows/Dockerfile.build-extension-nts similarity index 100% rename from .github/workflows/Dockerfile.build-extension rename to .github/workflows/Dockerfile.build-extension-nts diff --git a/.github/workflows/Dockerfile.build-extension-zts b/.github/workflows/Dockerfile.build-extension-zts index 3ee96178e..26148de9e 100644 --- a/.github/workflows/Dockerfile.build-extension-zts +++ b/.github/workflows/Dockerfile.build-extension-zts @@ -90,3 +90,5 @@ WORKDIR /work CMD ["php", "-v"] + + diff --git a/.github/workflows/Dockerfile.centos-php-test b/.github/workflows/Dockerfile.centos-php-test-nts similarity index 100% rename from .github/workflows/Dockerfile.centos-php-test rename to .github/workflows/Dockerfile.centos-php-test-nts diff --git 
a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 6b467816d..4243faf36 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -146,3 +146,5 @@ WORKDIR /work CMD ["bash"] + + diff --git a/.github/workflows/Dockerfile.ubuntu-php-test b/.github/workflows/Dockerfile.ubuntu-php-test-nts similarity index 100% rename from .github/workflows/Dockerfile.ubuntu-php-test rename to .github/workflows/Dockerfile.ubuntu-php-test-nts diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 168fbbcc5..2f8f62316 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -181,3 +181,5 @@ WORKDIR /work CMD ["bash"] + + diff --git a/.github/workflows/build-centos-php-test-images.yml b/.github/workflows/build-centos-php-test-images-nts.yml similarity index 90% rename from .github/workflows/build-centos-php-test-images.yml rename to .github/workflows/build-centos-php-test-images-nts.yml index 181e1fe36..613fbe0b4 100644 --- a/.github/workflows/build-centos-php-test-images.yml +++ b/.github/workflows/build-centos-php-test-images-nts.yml @@ -1,15 +1,15 @@ -name: Build CentOS PHP test images +name: Build CentOS PHP test images (NTS) on: workflow_dispatch: push: paths: - - .github/workflows/Dockerfile.centos-php-test - - .github/workflows/build-centos-php-test-images.yml + - .github/workflows/Dockerfile.centos-php-test-nts + - .github/workflows/build-centos-php-test-images-nts.yml env: REGISTRY: ghcr.io - IMAGE_NAME: aikidosec/firewall-php-test-centos + IMAGE_NAME: aikidosec/firewall-php-test-centos-nts VERSION: v1 jobs: @@ -32,7 +32,7 @@ jobs: uses: docker/build-push-action@v6 with: context: . - file: .github/workflows/Dockerfile.centos-php-test + file: .github/workflows/Dockerfile.centos-php-test-nts platforms: linux/amd64 push: true build-args: | @@ -60,7 +60,7 @@ jobs: uses: docker/build-push-action@v6 with: context: . - file: .github/workflows/Dockerfile.centos-php-test + file: .github/workflows/Dockerfile.centos-php-test-nts platforms: linux/arm64 push: true build-args: | diff --git a/.github/workflows/build-extension-images.yml b/.github/workflows/build-extension-images-nts.yml similarity index 90% rename from .github/workflows/build-extension-images.yml rename to .github/workflows/build-extension-images-nts.yml index be8def81e..36a4bd9e9 100644 --- a/.github/workflows/build-extension-images.yml +++ b/.github/workflows/build-extension-images-nts.yml @@ -1,15 +1,15 @@ -name: Create images for building extension +name: Create images for building extension (NTS) on: workflow_dispatch: push: paths: - - .github/workflows/Dockerfile.build-extension - - .github/workflows/build-extension-images.yml + - .github/workflows/Dockerfile.build-extension-nts + - .github/workflows/build-extension-images-nts.yml env: REGISTRY: ghcr.io - IMAGE_NAME: aikidosec/firewall-php-build-extension + IMAGE_NAME: aikidosec/firewall-php-build-extension-nts VERSION: v1 jobs: @@ -36,7 +36,7 @@ jobs: uses: docker/build-push-action@v6 with: context: . - file: .github/workflows/Dockerfile.build-extension + file: .github/workflows/Dockerfile.build-extension-nts target: dev platforms: linux/amd64 push: true @@ -71,7 +71,7 @@ jobs: uses: docker/build-push-action@v6 with: context: . 
- file: .github/workflows/Dockerfile.build-extension + file: .github/workflows/Dockerfile.build-extension-nts target: dev platforms: linux/arm64 push: true diff --git a/.github/workflows/build-ubuntu-php-test-images.yml b/.github/workflows/build-ubuntu-php-test-images-nts.yml similarity index 90% rename from .github/workflows/build-ubuntu-php-test-images.yml rename to .github/workflows/build-ubuntu-php-test-images-nts.yml index 767e389e5..f9d597d0c 100644 --- a/.github/workflows/build-ubuntu-php-test-images.yml +++ b/.github/workflows/build-ubuntu-php-test-images-nts.yml @@ -1,15 +1,15 @@ -name: Build Ubuntu PHP test images +name: Build Ubuntu PHP test images (NTS) on: workflow_dispatch: push: paths: - - .github/workflows/Dockerfile.ubuntu-php-test - - .github/workflows/build-ubuntu-php-test-images.yml + - .github/workflows/Dockerfile.ubuntu-php-test-nts + - .github/workflows/build-ubuntu-php-test-images-nts.yml env: REGISTRY: ghcr.io - IMAGE_NAME: aikidosec/firewall-php-test-ubuntu + IMAGE_NAME: aikidosec/firewall-php-test-ubuntu-nts VERSION: v1 jobs: @@ -30,7 +30,7 @@ jobs: - uses: docker/build-push-action@v6 with: context: . - file: .github/workflows/Dockerfile.ubuntu-php-test + file: .github/workflows/Dockerfile.ubuntu-php-test-nts platforms: linux/amd64 push: true build-args: | @@ -56,7 +56,7 @@ jobs: - uses: docker/build-push-action@v6 with: context: . - file: .github/workflows/Dockerfile.ubuntu-php-test + file: .github/workflows/Dockerfile.ubuntu-php-test-nts platforms: linux/arm64 push: true build-args: | diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 1bcb686e8..7a11fd854 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -78,10 +78,10 @@ jobs: path: | ${{ github.workspace }}/build/aikido-request-processor.so - build_php_extension: - name: Build php${{ matrix.php_version }} extension${{ matrix.arch }} + build_php_extension_nts: + name: Build php${{ matrix.php_version }} extension NTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} - container: ghcr.io/aikidosec/firewall-php-build-extension:${{ matrix.php_version }}-v1 + container: ghcr.io/aikidosec/firewall-php-build-extension-nts:${{ matrix.php_version }}-v1 strategy: matrix: php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] @@ -104,10 +104,61 @@ jobs: - name: Check PHP setup run: | - which php - php -v - php -i - php -m | grep -E 'curl|mysqli' || (echo "Required extensions missing" && php -m && exit 1) + php -v | grep -v "ZTS" > /dev/null || (echo "ERROR: PHP is ZTS, expected NTS!" 
&& php -v && exit 1) + + + - name: Build extension + run: | + rm -rf build + mkdir build + cd lib/php-extension + phpize + cd ../../build + CXX=g++ CXXFLAGS="-fPIC -g -O2 -I../lib/php-extension/include" LDFLAGS="-lstdc++" ../lib/php-extension/configure + make -j"$(nproc)" + + - name: Version Aikido extension + run: | + cd ./build/modules + mv aikido.so ${{ env.AIKIDO_ARTIFACT }}-nts.so + + - name: Archive build artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: ${{ env.AIKIDO_ARTIFACT }}-nts-${{ env.ARCH }} + if-no-files-found: error + path: | + ${{ github.workspace }}/build/modules/${{ env.AIKIDO_ARTIFACT }}-nts.so + ${{ github.workspace }}/tests/*.diff + + build_php_extension_zts: + name: Build php${{ matrix.php_version }} extension ZTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} + runs-on: ubuntu-24.04${{ matrix.arch }} + container: ghcr.io/aikidosec/firewall-php-build-extension-zts:${{ matrix.php_version }}-v1 + strategy: + matrix: + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + arch: [ '', '-arm' ] + fail-fast: false + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Get Arch + run: echo "ARCH=$(uname -m)" >> $GITHUB_ENV + + - name: Get Aikido version + run: | + AIKIDO_VERSION=$(grep '#define PHP_AIKIDO_VERSION' lib/php-extension/include/php_aikido.h | awk -F'"' '{print $2}') + echo $AIKIDO_VERSION + echo "AIKIDO_VERSION=$AIKIDO_VERSION" >> $GITHUB_ENV + echo "AIKIDO_ARTIFACT=aikido-extension-php-${{ matrix.php_version }}" >> $GITHUB_ENV + + - name: Check PHP setup + run: | + php -v | grep -q "ZTS" || (echo "ERROR: PHP is not ZTS!" && php -v && exit 1) - name: Build extension run: | @@ -122,16 +173,16 @@ jobs: - name: Version Aikido extension run: | cd ./build/modules - mv aikido.so ${{ env.AIKIDO_ARTIFACT }}.so + mv aikido.so ${{ env.AIKIDO_ARTIFACT }}-zts.so - name: Archive build artifacts uses: actions/upload-artifact@v4 if: always() with: - name: ${{ env.AIKIDO_ARTIFACT }}-${{ env.ARCH }} + name: ${{ env.AIKIDO_ARTIFACT }}-zts-${{ env.ARCH }} if-no-files-found: error path: | - ${{ github.workspace }}/build/modules/${{ env.AIKIDO_ARTIFACT }}.so + ${{ github.workspace }}/build/modules/${{ env.AIKIDO_ARTIFACT }}-zts.so ${{ github.workspace }}/tests/*.diff build_rpm: @@ -143,7 +194,7 @@ jobs: matrix: arch: ['', '-arm'] fail-fast: false - needs: [ build_libs, build_php_extension ] + needs: [ build_libs, build_php_extension_nts, build_php_extension_zts ] steps: - name: Checkout repository uses: actions/checkout@v4 @@ -165,11 +216,17 @@ jobs: echo "AIKIDO_LIBZEN=libzen_internals_${{ env.ARCH }}-unknown-linux-gnu.so" >> $GITHUB_ENV echo "AIKIDO_LIBZEN_VERSION=0.1.48" >> $GITHUB_ENV - - name: Download artifacts + - name: Download artifacts (NTS) uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4 with: pattern: | - aikido-extension-php-*-${{ env.ARCH }} + aikido-extension-php-*-nts-${{ env.ARCH }} + + - name: Download artifacts (ZTS) + uses: actions/download-artifact@v4 + with: + pattern: | + aikido-extension-php-*-zts-${{ env.ARCH }} - name: Download artifacts uses: actions/download-artifact@v4 @@ -192,8 +249,20 @@ jobs: mv aikido-agent-${{ env.ARCH }}/aikido-agent package/rpm/opt/aikido/aikido-agent mv aikido-request-processor-${{ env.ARCH }}/aikido-request-processor.so package/rpm/opt/aikido/aikido-request-processor.so mv ${{ env.AIKIDO_LIBZEN }} package/rpm/opt/aikido/${{ env.AIKIDO_LIBZEN }} - ls -lR aikido-extension-php-* - mv 
aikido-extension-php-*/__w/firewall-php/firewall-php/build/modules/aikido-extension-php-* package/rpm/opt/aikido/ + # Copy NTS extensions + for dir in aikido-extension-php-*-nts-*/; do + if [ -d "$dir" ]; then + find "$dir" -name "aikido-extension-php-*-nts.so" -exec mv {} package/rpm/opt/aikido/ \; + fi + done + # Copy ZTS extensions + for dir in aikido-extension-php-*-zts-*/; do + if [ -d "$dir" ]; then + find "$dir" -name "aikido-extension-php-*-zts.so" -exec mv {} package/rpm/opt/aikido/ \; + fi + done + echo "Extensions in package:" + ls -la package/rpm/opt/aikido/aikido-extension-php-*.so || true mv package/rpm/opt/aikido package/rpm/opt/aikido-${{ env.AIKIDO_VERSION }} chmod 777 package/rpm/opt/aikido-${{ env.AIKIDO_VERSION }}/* rpmdev-setuptree @@ -302,7 +371,7 @@ jobs: name: CentOS php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} runs-on: ubuntu-24.04${{ matrix.arch }} container: - image: ghcr.io/aikidosec/firewall-php-test-centos:${{ matrix.php_version }}-v1 + image: ghcr.io/aikidosec/firewall-php-test-centos-nts:${{ matrix.php_version }}-v1 options: --privileged needs: [ build_rpm ] strategy: @@ -388,7 +457,7 @@ jobs: name: Ubuntu php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} runs-on: ubuntu-24.04${{ matrix.arch }} container: - image: ghcr.io/aikidosec/firewall-php-test-ubuntu:${{ matrix.php_version }}-v1 + image: ghcr.io/aikidosec/firewall-php-test-ubuntu-nts:${{ matrix.php_version }}-v1 options: --privileged needs: [ build_deb ] strategy: diff --git a/package/rpm/aikido.spec b/package/rpm/aikido.spec index 930079a67..d6d28faad 100644 --- a/package/rpm/aikido.spec +++ b/package/rpm/aikido.spec @@ -82,10 +82,26 @@ for PHP_VERSION in "${PHP_VERSIONS[@]}"; do PHP_EXT_DIR=$($PHP_BIN -i | grep "^extension_dir" | awk '{print $3}') PHP_MOD_DIR=$($PHP_BIN -i | grep "Scan this dir for additional .ini files" | awk -F"=> " '{print $2}') + # Detect if PHP is ZTS or NTS + PHP_THREAD_SAFETY=$($PHP_BIN -i | grep "Thread Safety" | awk -F"=> " '{print $2}' | tr -d ' ') + if [ "$PHP_THREAD_SAFETY" = "enabled" ]; then + EXT_SUFFIX="-zts" + echo "PHP $PHP_VERSION is ZTS (Thread Safe)" + else + EXT_SUFFIX="-nts" + echo "PHP $PHP_VERSION is NTS (Non-Thread Safe)" + fi + # Install Aikido PHP extension if [ -d "$PHP_EXT_DIR" ]; then - echo "Installing new Aikido extension in $PHP_EXT_DIR/aikido-%{version}.so..." - ln -sf /opt/aikido-%{version}/aikido-extension-php-$PHP_VERSION.so $PHP_EXT_DIR/aikido-%{version}.so + EXT_FILE="aikido-extension-php-$PHP_VERSION$EXT_SUFFIX.so" + if [ -f "/opt/aikido-%{version}/$EXT_FILE" ]; then + echo "Installing new Aikido extension in $PHP_EXT_DIR/aikido-%{version}.so..." + ln -sf /opt/aikido-%{version}/$EXT_FILE $PHP_EXT_DIR/aikido-%{version}.so + else + echo "Warning: Extension file /opt/aikido-%{version}/$EXT_FILE not found! Skipping..." + continue + fi else echo "No extension dir for PHP $PHP_VERSION! Skipping..." 
continue diff --git a/tools/build.sh b/tools/build.sh index 395ac9345..834b10100 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -3,8 +3,18 @@ set -e export PATH="$PATH:$HOME/go/bin:$HOME/.local/bin" PHP_VERSION=$(php -v | grep -oP 'PHP \K\d+\.\d+' | head -n 1) -AIKIDO_EXTENSION=aikido-extension-php-$PHP_VERSION.so -AIKIDO_EXTENSION_DEBUG=aikido-extension-php-$PHP_VERSION.so.debug + +# Detect if PHP is ZTS or NTS +if php -v | grep -q "ZTS"; then + EXT_SUFFIX="-zts" + echo "Building ZTS extension" +else + EXT_SUFFIX="-nts" + echo "Building NTS extension" +fi + +AIKIDO_EXTENSION=aikido-extension-php-$PHP_VERSION$EXT_SUFFIX.so +AIKIDO_EXTENSION_DEBUG=aikido-extension-php-$PHP_VERSION$EXT_SUFFIX.so.debug rm -rf build mkdir build diff --git a/tools/rpm_build.sh b/tools/rpm_build.sh index c77ecfcf8..1425d3565 100755 --- a/tools/rpm_build.sh +++ b/tools/rpm_build.sh @@ -8,6 +8,13 @@ VERSION=$(grep '#define PHP_AIKIDO_VERSION' lib/php-extension/include/php_aikido AIKIDO_INTERNALS_REPO=https://api.github.com/repos/AikidoSec/zen-internals AIKIDO_INTERNALS_LIB=libzen_internals_$arch-unknown-linux-gnu.so +# Detect if PHP is ZTS or NTS +if php -v | grep -q "ZTS"; then + EXT_SUFFIX="-zts" +else + EXT_SUFFIX="-nts" +fi + mkdir -p ~/rpmbuild/SOURCES/aikido-php-firewall-$VERSION cp -rf package/rpm/opt ~/rpmbuild/SOURCES/aikido-php-firewall-$VERSION/ @@ -16,7 +23,7 @@ cp -f package/rpm/aikido.spec ~/rpmbuild/SPECS/ cp build/aikido-agent ~/rpmbuild/SOURCES/aikido-php-firewall-$VERSION/opt/aikido/aikido-agent cp build/aikido-request-processor.so ~/rpmbuild/SOURCES/aikido-php-firewall-$VERSION/opt/aikido/aikido-request-processor.so -cp build/modules/aikido-extension-php-$PHP_VERSION.so ~/rpmbuild/SOURCES/aikido-php-firewall-$VERSION/opt/aikido/aikido-extension-php-$PHP_VERSION.so +cp build/modules/aikido-extension-php-$PHP_VERSION$EXT_SUFFIX.so ~/rpmbuild/SOURCES/aikido-php-firewall-$VERSION/opt/aikido/aikido-extension-php-$PHP_VERSION$EXT_SUFFIX.so curl -L -o $AIKIDO_INTERNALS_LIB $(curl -s $AIKIDO_INTERNALS_REPO/releases/latest | jq -r ".assets[] | select(.name == \"$AIKIDO_INTERNALS_LIB\") | .browser_download_url") mv $AIKIDO_INTERNALS_LIB ~/rpmbuild/SOURCES/aikido-php-firewall-$VERSION/opt/aikido/$AIKIDO_INTERNALS_LIB diff --git a/tools/sample_apps_build.sh b/tools/sample_apps_build.sh index d83c453ef..a65382cb6 100644 --- a/tools/sample_apps_build.sh +++ b/tools/sample_apps_build.sh @@ -6,8 +6,18 @@ export PATH="$PATH:$HOME/go/bin:$HOME/.local/bin" PHP_VERSION=$(php -v | grep -oP 'PHP \K\d+\.\d+' | head -n 1) AIKIDO_VERSION=$(grep '#define PHP_AIKIDO_VERSION' lib/php-extension/include/php_aikido.h | awk -F'"' '{print $2}') -AIKIDO_EXTENSION=aikido-extension-php-$AIKIDO_VERSION.so -AIKIDO_EXTENSION_DEBUG=aikido-extension-php-$AIKIDO_VERSION.so.debug + +# Detect if PHP is ZTS or NTS +if php -v | grep -q "ZTS"; then + EXT_SUFFIX="-zts" + echo "Building ZTS extension" +else + EXT_SUFFIX="-nts" + echo "Building NTS extension" +fi + +AIKIDO_EXTENSION=aikido-extension-php-$AIKIDO_VERSION$EXT_SUFFIX.so +AIKIDO_EXTENSION_DEBUG=aikido-extension-php-$AIKIDO_VERSION$EXT_SUFFIX.so.debug AIKIDO_INTERNALS_REPO=https://api.github.com/repos/AikidoSec/zen-internals AIKIDO_INTERNALS_LIB=libzen_internals_$arch-unknown-linux-gnu.so From d7f67050d6bc5346e3ad2f02c104047674508921 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 17:33:37 +0200 Subject: [PATCH 024/170] Updated naming convention for tests too --- .github/workflows/build.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff 
--git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7a11fd854..26eddfc53 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -368,7 +368,7 @@ jobs: ${{ env.AIKIDO_ARTIFACT }} test_php_centos: - name: CentOS php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} + name: CentOS NTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: ghcr.io/aikidosec/firewall-php-test-centos-nts:${{ matrix.php_version }}-v1 @@ -454,7 +454,7 @@ jobs: python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 test_php_ubuntu: - name: Ubuntu php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} + name: Ubuntu NTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: ghcr.io/aikidosec/firewall-php-test-ubuntu-nts:${{ matrix.php_version }}-v1 @@ -519,7 +519,7 @@ jobs: python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 test_php_ubuntu_zts: - name: Ubuntu ZTS php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} + name: Ubuntu ZTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: ghcr.io/aikidosec/firewall-php-test-ubuntu-zts:${{ matrix.php_version }}-v1 @@ -587,7 +587,7 @@ jobs: python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 test_php_centos_zts: - name: CentOS ZTS php-${{ matrix.php_version }} ${{ matrix.server }}${{ matrix.arch }} + name: CentOS ZTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: ghcr.io/aikidosec/firewall-php-test-centos-zts:${{ matrix.php_version }}-v1 From 754b35a66f40f67be20ee8a9865a0cd37314fb52 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 17:38:19 +0200 Subject: [PATCH 025/170] ++ Naming convention --- .github/workflows/build.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 26eddfc53..0fc7261f3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,7 +11,7 @@ on: jobs: build_libs: - name: Build Go libs${{ matrix.arch }} + name: Build Go libs ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: ghcr.io/aikidosec/firewall-php-build-libs:v1 @@ -79,7 +79,7 @@ jobs: ${{ github.workspace }}/build/aikido-request-processor.so build_php_extension_nts: - name: Build php${{ matrix.php_version }} extension NTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} + name: Build php ${{ matrix.php_version }} extension NTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: ghcr.io/aikidosec/firewall-php-build-extension-nts:${{ matrix.php_version }}-v1 strategy: @@ -133,7 +133,7 @@ jobs: ${{ github.workspace }}/tests/*.diff build_php_extension_zts: - name: Build php${{ matrix.php_version }} extension ZTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} + name: Build php ${{ matrix.php_version }} extension ZTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: 
ghcr.io/aikidosec/firewall-php-build-extension-zts:${{ matrix.php_version }}-v1 strategy: @@ -186,7 +186,7 @@ jobs: ${{ github.workspace }}/tests/*.diff build_rpm: - name: Build rpm${{ matrix.arch }} + name: Build rpm ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: quay.io/centos/centos:stream9 @@ -299,7 +299,7 @@ jobs: ~/rpmbuild/RPMS/${{ env.ARCH }}/${{ env.AIKIDO_ARTIFACT_RELEASE }} build_deb: - name: Build deb${{ matrix.arch }} + name: Build deb ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: ubuntu:22.04 From 079c999f521eefa68c534435a2b883f2c2e15032 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 15:51:32 +0000 Subject: [PATCH 026/170] Remove -dev tag from php builds versioning --- .github/workflows/Dockerfile.build-extension-zts | 4 ++++ .github/workflows/Dockerfile.centos-php-test-zts | 4 ++++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 4 ++++ 3 files changed, 12 insertions(+) diff --git a/.github/workflows/Dockerfile.build-extension-zts b/.github/workflows/Dockerfile.build-extension-zts index 26148de9e..2fc2b99dd 100644 --- a/.github/workflows/Dockerfile.build-extension-zts +++ b/.github/workflows/Dockerfile.build-extension-zts @@ -40,6 +40,8 @@ ARG PHP_SRC_REF WORKDIR /usr/src RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git WORKDIR /usr/src/php-src + +RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac RUN ./buildconf --force # Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) @@ -68,6 +70,7 @@ RUN ./configure \ --enable-maintainer-zts \ --enable-mbstring \ --enable-pcntl \ + --with-extra-version="" \ --with-curl \ --with-mysqli \ --with-openssl \ @@ -92,3 +95,4 @@ CMD ["php", "-v"] + diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 4243faf36..fe47aea88 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -49,6 +49,8 @@ ARG PHP_SRC_REF WORKDIR /usr/src RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git WORKDIR /usr/src/php-src + +RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac RUN ./buildconf --force # Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) @@ -76,6 +78,7 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ + --with-extra-version="" \ --with-curl \ --with-mysqli \ --with-openssl \ @@ -148,3 +151,4 @@ CMD ["bash"] + diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 2f8f62316..cc74a0375 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -39,6 +39,8 @@ ARG PHP_SRC_REF WORKDIR /usr/src RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git WORKDIR /usr/src/php-src + +RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac RUN ./buildconf --force # Patch openssl.c for OpenSSL compatibility @@ -66,6 +68,7 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ + --with-extra-version="" \ --with-curl \ --with-mysqli \ --with-openssl \ @@ -183,3 +186,4 @@ CMD ["bash"] + From b05ddb559e11f1d2212d149e8fe2edbccf897c48 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 
2025 16:05:13 +0000 Subject: [PATCH 027/170] Add symbolic links for PHP and PHP-FPM in CentOS and Ubuntu Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-zts | 3 +++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 3 +++ 2 files changed, 6 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index fe47aea88..a942efca1 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -136,6 +136,9 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ ENV PATH="/usr/local/bin:${PATH}" +RUN ln -sf /usr/local/bin/php /usr/bin/php && \ + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index cc74a0375..cd032a617 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -126,6 +126,9 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ ENV PATH="/usr/local/bin:${PATH}" +RUN ln -sf /usr/local/bin/php /usr/bin/php && \ + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini From 4ee292db7dd35416c3f6ce29ada2fb47f2af8584 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 16:15:30 +0000 Subject: [PATCH 028/170] Add directories for PHP-FPM configuration in CentOS and Ubuntu Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-zts | 4 ++++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 +++++ tools/server_tests/apache/main.py | 2 +- tools/server_tests/nginx/main.py | 2 +- 4 files changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index a942efca1..74d5632c2 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -139,6 +139,10 @@ ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true +RUN mkdir -p /etc/php-fpm.d && \ + mkdir -p /run/php-fpm && \ + mkdir -p /etc/httpd || true + # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index cd032a617..a2839a826 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -129,6 +129,11 @@ ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true +RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ + mkdir -p /etc/php && \ + mkdir -p /etc/php/${PHP_VER}/fpm/pool.d && \ + mkdir -p /run/php-fpm + # Configure MySQL socket path for mysqli 
(so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini diff --git a/tools/server_tests/apache/main.py b/tools/server_tests/apache/main.py index 22161fa37..c0bc18efc 100755 --- a/tools/server_tests/apache/main.py +++ b/tools/server_tests/apache/main.py @@ -148,7 +148,7 @@ def toggle_config_line(file_path, line_to_check, comment_ch, enable=False): commented_line_pattern = r"\s*" + re.escape(line_to_check.strip()) + r".*" if enable: - commented_line_pattern = "\s*" + comment_ch + commented_line_pattern + commented_line_pattern = r"\s*" + comment_ch + commented_line_pattern # Initialize a flag to track changes changes_made = False diff --git a/tools/server_tests/nginx/main.py b/tools/server_tests/nginx/main.py index 68e322175..50aaa2952 100644 --- a/tools/server_tests/nginx/main.py +++ b/tools/server_tests/nginx/main.py @@ -33,7 +33,7 @@ def get_user_of_process(process_name): except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess): pass -nginx_conf_template = """ +nginx_conf_template = r""" server {{ listen {port}; server_name {name}; From 522ee1443fd775c9115c777d8927669686bcc9ce Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 22:54:30 +0000 Subject: [PATCH 029/170] + --- lib/php-extension/Action.cpp | 2 +- lib/php-extension/PhpWrappers.cpp | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index 0771a2724..d53e07fe8 100644 --- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -3,8 +3,8 @@ ACTION_STATUS Action::executeThrow(json &event) { int _code = event["code"].get(); std::string _message = event["message"].get(); - zend_throw_exception(zend_exception_get_default(), _message.c_str(), _code); CallPhpFunctionWithOneParam("http_response_code", _code); + zend_throw_exception(zend_exception_get_default(), _message.c_str(), _code); return BLOCK; } diff --git a/lib/php-extension/PhpWrappers.cpp b/lib/php-extension/PhpWrappers.cpp index 7958c2039..927289e40 100644 --- a/lib/php-extension/PhpWrappers.cpp +++ b/lib/php-extension/PhpWrappers.cpp @@ -17,7 +17,6 @@ bool CallPhpEcho(std::string message) { bool CallPhpFunction(std::string function_name, unsigned int params_number, zval* params, zval* return_value, zval* object) { if (!object && !zend_hash_str_exists(CG(function_table), function_name.c_str(), function_name.size())) { - AIKIDO_LOG_INFO("Function name '%s' does not exist!\n", function_name.c_str()); return false; } @@ -38,10 +37,14 @@ bool CallPhpFunction(std::string function_name, unsigned int params_number, zval zval_dtor(&_function_name); + if (_result != SUCCESS) { + return false; + } + if (!return_value) { zval_ptr_dtor(&_temp_return_value); } - return _result == SUCCESS; + return true; } bool CallPhpFunctionWithOneParam(std::string function_name, long first_param, zval* return_value, zval* object) { @@ -91,3 +94,4 @@ std::string CallPhpFunctionCurlGetInfo(zval* curl_handle, int curl_info_option) return result; } + From 7ac0e8d8375fadfcdfc9473a8c65fbf0c19c11ec Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 23:21:02 +0000 Subject: [PATCH 030/170] Add PHP-FPM configuration setup in CentOS and Ubuntu Dockerfiles --- .../workflows/Dockerfile.centos-php-test-zts | 23 ++++++++++++++++++- .../workflows/Dockerfile.ubuntu-php-test-zts | 23 ++++++++++++++++++- 2 files changed, 44 insertions(+), 2 deletions(-) diff 
--git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 74d5632c2..4eebc8d0f 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -141,7 +141,28 @@ RUN ln -sf /usr/local/bin/php /usr/bin/php && \ RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /run/php-fpm && \ - mkdir -p /etc/httpd || true + mkdir -p /var/run && \ + mkdir -p /var/log && \ + mkdir -p /etc/httpd || true && \ + if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ + fi && \ + if [ ! -f /usr/local/etc/php-fpm.d/www.conf ]; then \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ + fi # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index a2839a826..5cbe0191c 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -132,7 +132,28 @@ RUN ln -sf /usr/local/bin/php /usr/bin/php && \ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ mkdir -p /etc/php && \ mkdir -p /etc/php/${PHP_VER}/fpm/pool.d && \ - mkdir -p /run/php-fpm + mkdir -p /run/php-fpm && \ + mkdir -p /var/run && \ + mkdir -p /var/log && \ + if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ + fi && \ + if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ + fi # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ From 7624bc0e5c84537d811029257c63a054b4758c0e Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 23:32:37 +0000 Subject: [PATCH 031/170] Update PHP-FPM configuration to use 'root' user and group in CentOS and Ubuntu Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-zts | 12 +++++++----- .github/workflows/Dockerfile.ubuntu-php-test-zts | 12 +++++++----- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 4eebc8d0f..7bb6f69d9 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -144,6 +144,7 @@ RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /var/run && \ mkdir -p /var/log && \ mkdir -p /etc/httpd || true && \ + mkdir -p /usr/local/etc/php-fpm.d && \ if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ @@ -152,17 +153,18 @@ RUN mkdir -p /etc/php-fpm.d && \ fi && \ if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ - echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ - fi + fi && \ + php-fpm --test || true # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 5cbe0191c..3841c83f0 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -135,6 +135,7 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ mkdir -p /run/php-fpm && \ mkdir -p /var/run && \ mkdir -p /var/log && \ + mkdir -p /usr/local/etc/php-fpm.d && \ if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ @@ -143,17 +144,18 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ fi && \ if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ - echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ - fi + fi && \ + php-fpm --test || true # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ From 815418802c2c39399c9e7d1cd634955947256a79 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 20 Nov 2025 23:44:17 +0000 Subject: [PATCH 032/170] Refactor PHP-FPM startup commands in CentOS and Ubuntu Dockerfiles to include configuration validation and symbolic links --- .github/workflows/Dockerfile.centos-php-test-zts | 5 ++++- .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 7bb6f69d9..8ef5c6931 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -164,7 +164,10 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ fi && \ - php-fpm --test || true + php-fpm -t -y /usr/local/etc/php-fpm.conf && \ + mkdir -p /etc/php-fpm.d && \ + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d /etc/php-fpm.d # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 3841c83f0..f385e90f9 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -155,7 +155,10 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ fi && \ - php-fpm --test || true + php-fpm -t -y /usr/local/etc/php-fpm.conf && \ + mkdir -p /etc/php-fpm.d && \ + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d /etc/php-fpm.d # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ From 04e06fe9076bcae2f36624fcf72826e6149d160c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 21 Nov 2025 00:01:26 +0000 Subject: [PATCH 033/170] Enhance PHP-FPM configuration in Dockerfiles for CentOS and Ubuntu to ensure 'daemonize' 
is set to 'no' for PHP 8.2+, and improve validation checks during startup. --- .../workflows/Dockerfile.centos-php-test-zts | 17 ++++++++++++++--- .../workflows/Dockerfile.ubuntu-php-test-zts | 17 ++++++++++++++--- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 8ef5c6931..6a95610ac 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -101,10 +101,15 @@ RUN mkdir -p /usr/local/etc/php-fpm.d && \ if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ if [ -f /usr/local/etc/php-fpm.conf.default ]; then \ cp /usr/local/etc/php-fpm.conf.default /usr/local/etc/php-fpm.conf; \ + # Ensure daemonize is set for PHP 8.2+ + if ! grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ + sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ + fi; \ else \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /var/run/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ fi; \ fi && \ @@ -149,6 +154,7 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ fi && \ if [ ! -f /usr/local/etc/php-fpm.d/www.conf ]; then \ @@ -164,10 +170,15 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ fi && \ - php-fpm -t -y /usr/local/etc/php-fpm.conf && \ + PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + if [ "$PHP_MAJOR" -ge 8 ] && [ "$PHP_MINOR" -ge 2 ]; then \ + php-fpm -t -y /usr/local/etc/php-fpm.conf || (echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && cat /usr/local/etc/php-fpm.conf && exit 1); \ + else \ + php-fpm -t -y /usr/local/etc/php-fpm.conf; \ + fi && \ mkdir -p /etc/php-fpm.d && \ - ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ - ln -sf /usr/local/etc/php-fpm.d /etc/php-fpm.d + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index f385e90f9..86cd90e59 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -91,10 +91,15 @@ RUN mkdir -p /usr/local/etc/php-fpm.d && \ if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ if [ -f /usr/local/etc/php-fpm.conf.default ]; then \ cp /usr/local/etc/php-fpm.conf.default /usr/local/etc/php-fpm.conf; \ + # Ensure daemonize is set for PHP 8.2+ + if ! 
grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ + sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ + fi; \ else \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /var/run/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ fi; \ fi && \ @@ -140,6 +145,7 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ fi && \ if [ ! -f /usr/local/etc/php-fpm.d/www.conf ]; then \ @@ -155,10 +161,15 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ fi && \ - php-fpm -t -y /usr/local/etc/php-fpm.conf && \ + PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + if [ "$PHP_MAJOR" -ge 8 ] && [ "$PHP_MINOR" -ge 2 ]; then \ + php-fpm -t -y /usr/local/etc/php-fpm.conf || (echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && cat /usr/local/etc/php-fpm.conf && exit 1); \ + else \ + php-fpm -t -y /usr/local/etc/php-fpm.conf; \ + fi && \ mkdir -p /etc/php-fpm.d && \ - ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ - ln -sf /usr/local/etc/php-fpm.d /etc/php-fpm.d + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ From e6e02e90edc0a9c98c523d58e30c4facc1c6e7fb Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 21 Nov 2025 00:03:55 +0000 Subject: [PATCH 034/170] Ensure 'daemonize' is set to 'no' in PHP-FPM configuration for CentOS and Ubuntu Dockerfiles, and improve error handling during configuration validation. --- .github/workflows/Dockerfile.centos-php-test-zts | 13 ++++++------- .github/workflows/Dockerfile.ubuntu-php-test-zts | 13 ++++++------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 6a95610ac..c7f32404c 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -157,6 +157,10 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ fi && \ + # Ensure daemonize is set (required for PHP 8.2+) even if config exists from build stage \ + if ! grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ + sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ + fi && \ if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ @@ -170,13 +174,8 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ fi && \ - PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ - PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ - if [ "$PHP_MAJOR" -ge 8 ] && [ "$PHP_MINOR" -ge 2 ]; then \ - php-fpm -t -y /usr/local/etc/php-fpm.conf || (echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && cat /usr/local/etc/php-fpm.conf && exit 1); \ - else \ - php-fpm -t -y /usr/local/etc/php-fpm.conf; \ - fi && \ + # Test the configuration \ + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 86cd90e59..c7e88c182 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -148,6 +148,10 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ fi && \ + # Ensure daemonize is set (required for PHP 8.2+) even if config exists from build stage \ + if ! grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ + sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ + fi && \ if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ @@ -161,13 +165,8 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ fi && \ - PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ - PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ - if [ "$PHP_MAJOR" -ge 8 ] && [ "$PHP_MINOR" -ge 2 ]; then \ - php-fpm -t -y /usr/local/etc/php-fpm.conf || (echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && cat /usr/local/etc/php-fpm.conf && exit 1); \ - else \ - php-fpm -t -y /usr/local/etc/php-fpm.conf; \ - fi && \ + # Test the configuration \ + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf From 2ca27a93631c320d4d634799ec13cd0fbbbfcfae Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 21 Nov 2025 00:06:44 +0000 Subject: [PATCH 035/170] Refactor PHP-FPM configuration in CentOS and Ubuntu --- .../workflows/Dockerfile.centos-php-test-zts | 42 ++++++++----------- .../workflows/Dockerfile.ubuntu-php-test-zts | 42 ++++++++----------- 2 files changed, 36 insertions(+), 48 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index c7f32404c..1cc52c5a1 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -150,30 +150,24 @@ RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /var/log && \ mkdir -p /etc/httpd || true && \ mkdir -p /usr/local/etc/php-fpm.d && \ - if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ - echo "[global]" > /usr/local/etc/php-fpm.conf && \ - echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ - echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ - fi && \ - # Ensure daemonize is set (required for PHP 8.2+) even if config exists from build stage \ - if ! grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ - sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ - fi && \ - if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ - echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ - echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ - fi && \ + # Always create a clean, minimal config file (overwrite any from build stage) \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + # Always create a clean pool config (overwrite any from build stage) \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ # Test the configuration \ php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ mkdir -p /etc/php-fpm.d && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index c7e88c182..4d7b10a7c 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -141,30 +141,24 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ mkdir -p /var/run && \ mkdir -p /var/log && \ mkdir -p /usr/local/etc/php-fpm.d && \ - if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ - echo "[global]" > /usr/local/etc/php-fpm.conf && \ - echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ - echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ - fi && \ - # Ensure daemonize is set (required for PHP 8.2+) even if config exists from build stage \ - if ! 
grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ - sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ - fi && \ - if [ ! -f /usr/local/etc/php-fpm.d/www.conf ]; then \ - echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ - echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ - fi && \ + # Always create a clean, minimal config file (overwrite any from build stage) \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + # Always create a clean pool config (overwrite any from build stage) \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ # Test the configuration \ php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ mkdir -p /etc/php-fpm.d && \ From 2d6768d6ff61f248c09fd697072e2f2b480c2b26 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 21 Nov 2025 00:42:08 +0000 Subject: [PATCH 036/170] . 
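Set "daemonize = yes" in the generated PHP-FPM test configs. The test harness starts
php-fpm with a blocking subprocess.run() call; with "daemonize = no" the master process
stays in the foreground and that call never returns, hanging the run. A minimal sketch of
the pattern (illustrative only, not part of this patch; php_fpm_bin is an assumed binary
path, the harness resolves its own):

    import subprocess

    php_fpm_bin = "php-fpm"  # assumption for illustration
    # With daemonize = no this call blocks forever; with daemonize = yes the master
    # forks into the background and run() returns once startup succeeds.
    subprocess.run([php_fpm_bin, "--allow-to-run-as-root"], check=True)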
--- .github/workflows/Dockerfile.centos-php-test-zts | 8 +++----- .github/workflows/Dockerfile.ubuntu-php-test-zts | 3 ++- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 1cc52c5a1..a9f7363e7 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -150,13 +150,12 @@ RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /var/log && \ mkdir -p /etc/httpd || true && \ mkdir -p /usr/local/etc/php-fpm.d && \ - # Always create a clean, minimal config file (overwrite any from build stage) \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ - echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ - # Always create a clean pool config (overwrite any from build stage) \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ @@ -168,8 +167,7 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ - # Test the configuration \ - php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ + # Test the configuration \ php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 4d7b10a7c..5ecb90a5f 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -142,10 +142,11 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ mkdir -p /var/log && \ mkdir -p /usr/local/etc/php-fpm.d && \ # Always create a clean, minimal config file (overwrite any from build stage) \ + # Note: daemonize = yes for test environment so subprocess.run() doesn't hang \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ - echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ # Always create a clean pool config (overwrite any from build stage) \ echo "[www]" > 
/usr/local/etc/php-fpm.d/www.conf && \ From da5c41b037a5f06a7046696a6dfbb41daa240cb6 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 21 Nov 2025 01:31:02 +0000 Subject: [PATCH 037/170] Update PHP-FPM configuration --- .../workflows/Dockerfile.centos-php-test-zts | 1 + .../workflows/Dockerfile.ubuntu-php-test-zts | 3 ++- tools/server_tests/nginx/main.py | 18 ++++++++++++++++-- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index a9f7363e7..c5fd4bda7 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -156,6 +156,7 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 5ecb90a5f..3d45dec35 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -148,7 +148,8 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ - # Always create a clean pool config (overwrite any from build stage) \ + echo "include=/etc/php/${PHP_VER}/fpm/pool.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ diff --git a/tools/server_tests/nginx/main.py b/tools/server_tests/nginx/main.py index 50aaa2952..591671b32 100644 --- a/tools/server_tests/nginx/main.py +++ b/tools/server_tests/nginx/main.py @@ -182,6 +182,12 @@ def nginx_php_fpm_process_test(test_data): nginx_create_conf_file(test_data["test_name"], test_data["test_dir"], test_data["server_port"]) test_data["fpm_config"] = php_fpm_create_conf_file(test_data["test_dir"], test_data["test_name"], "root", test_data["env"]) + # Reload PHP-FPM to pick up the new pool config + result = subprocess.run(['pgrep', '-f', 'php-fpm'], capture_output=True, text=True) + if result.stdout.strip(): + php_fpm_pid = result.stdout.strip().split('\n')[0] + subprocess.run(['kill', '-USR2', php_fpm_pid], check=False) # Reload config + time.sleep(0.5) # Give it time to reload return test_data @@ -195,9 +201,17 @@ def nginx_php_fpm_pre_tests(): create_folder(f'{log_dir}/php-fpm') modify_nginx_conf(nginx_global_conf) subprocess.run(['nginx'], check=True) - subprocess.run([php_fpm_bin, '--allow-to-run-as-root'], check=True) + # Start PHP-FPM in background and verify it's running + # Use /etc/php-fpm.conf if it exists (symlink to /usr/local/etc/php-fpm.conf), otherwise use /usr/local/etc/php-fpm.conf + php_fpm_config = "/etc/php-fpm.conf" if os.path.exists("/etc/php-fpm.conf") else "/usr/local/etc/php-fpm.conf" + subprocess.run([php_fpm_bin, '--allow-to-run-as-root', '-y', 
php_fpm_config], check=True) + time.sleep(2) + # Verify PHP-FPM is actually running + result = subprocess.run(['pgrep', '-f', 'php-fpm'], capture_output=True, text=True) + if not result.stdout.strip(): + raise RuntimeError("PHP-FPM failed to start!") print("nginx and php-fpm servers restarted!") - time.sleep(5) + time.sleep(3) def nginx_php_fpm_start_server(test_data, test_lib_dir, valgrind): From 8752b050bc79238843cfcf108df994c2c88451b3 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 21 Nov 2025 12:59:09 +0000 Subject: [PATCH 038/170] Refactor PHP-FPM configuration in CentOS and Ubuntu Dockerfiles --- .../workflows/Dockerfile.centos-php-test-zts | 68 +++++------------- .../workflows/Dockerfile.ubuntu-php-test-zts | 70 +++++-------------- tools/server_tests/nginx/main.py | 18 +---- 3 files changed, 40 insertions(+), 116 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index c5fd4bda7..3e6c95cd1 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -88,49 +88,6 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true -# Install PHP-FPM configuration files -RUN mkdir -p /usr/local/etc/php-fpm.d && \ - if [ -f sapi/fpm/php-fpm.conf ]; then \ - cp sapi/fpm/php-fpm.conf /usr/local/etc/php-fpm.conf.default || \ - cp sapi/fpm/php-fpm.conf.default /usr/local/etc/php-fpm.conf.default || true; \ - fi && \ - if [ -f sapi/fpm/www.conf ]; then \ - cp sapi/fpm/www.conf /usr/local/etc/php-fpm.d/www.conf.default || \ - cp sapi/fpm/www.conf.default /usr/local/etc/php-fpm.d/www.conf.default || true; \ - fi && \ - if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ - if [ -f /usr/local/etc/php-fpm.conf.default ]; then \ - cp /usr/local/etc/php-fpm.conf.default /usr/local/etc/php-fpm.conf; \ - # Ensure daemonize is set for PHP 8.2+ - if ! grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ - sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ - fi; \ - else \ - echo "[global]" > /usr/local/etc/php-fpm.conf && \ - echo "pid = /var/run/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ - echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ - fi; \ - fi && \ - if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ - if [ -f /usr/local/etc/php-fpm.d/www.conf.default ]; then \ - cp /usr/local/etc/php-fpm.d/www.conf.default /usr/local/etc/php-fpm.d/www.conf; \ - else \ - echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ - echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ - fi; \ - fi - # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local @@ -144,19 +101,23 @@ ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true -RUN mkdir -p /etc/php-fpm.d && \ +RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /run/php-fpm && \ mkdir -p /var/run && \ - mkdir -p /var/log && \ + mkdir -p /var/log/php-fpm && \ mkdir -p /etc/httpd || true && \ mkdir -p /usr/local/etc/php-fpm.d && \ + mkdir -p /usr/local/etc/php/conf.d && \ + + ln -sf /usr/local/etc/php/conf.d /etc/php.d || true && \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm/error.log" >> /usr/local/etc/php-fpm.conf && \ echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ @@ -168,9 +129,18 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ - # Test the configuration \ php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ - mkdir -p /etc/php-fpm.d && \ - ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf + + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 | grep -v "Nothing matches the include pattern" || true && \ + php-fpm -t -y /usr/local/etc/php-fpm.conf >/dev/null 2>&1 || \ + (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && \ + echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && \ + echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ + exit 1) && \ + + 
ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 3d45dec35..d223af21a 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -78,49 +78,6 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true -# Install PHP-FPM configuration files -RUN mkdir -p /usr/local/etc/php-fpm.d && \ - if [ -f sapi/fpm/php-fpm.conf ]; then \ - cp sapi/fpm/php-fpm.conf /usr/local/etc/php-fpm.conf.default || \ - cp sapi/fpm/php-fpm.conf.default /usr/local/etc/php-fpm.conf.default || true; \ - fi && \ - if [ -f sapi/fpm/www.conf ]; then \ - cp sapi/fpm/www.conf /usr/local/etc/php-fpm.d/www.conf.default || \ - cp sapi/fpm/www.conf.default /usr/local/etc/php-fpm.d/www.conf.default || true; \ - fi && \ - if [ ! -f /usr/local/etc/php-fpm.conf ]; then \ - if [ -f /usr/local/etc/php-fpm.conf.default ]; then \ - cp /usr/local/etc/php-fpm.conf.default /usr/local/etc/php-fpm.conf; \ - # Ensure daemonize is set for PHP 8.2+ - if ! grep -q "^daemonize" /usr/local/etc/php-fpm.conf; then \ - sed -i '/^\[global\]/a daemonize = no' /usr/local/etc/php-fpm.conf; \ - fi; \ - else \ - echo "[global]" > /usr/local/etc/php-fpm.conf && \ - echo "pid = /var/run/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ - echo "daemonize = no" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf; \ - fi; \ - fi && \ - if [ ! 
-f /usr/local/etc/php-fpm.d/www.conf ]; then \ - if [ -f /usr/local/etc/php-fpm.d/www.conf.default ]; then \ - cp /usr/local/etc/php-fpm.d/www.conf.default /usr/local/etc/php-fpm.d/www.conf; \ - else \ - echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ - echo "user = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.owner = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.group = nobody" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf; \ - fi; \ - fi - # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local @@ -135,17 +92,20 @@ RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ - mkdir -p /etc/php && \ + mkdir -p /etc/php/${PHP_VER}/fpm && \ mkdir -p /etc/php/${PHP_VER}/fpm/pool.d && \ + mkdir -p /run/php && \ mkdir -p /run/php-fpm && \ mkdir -p /var/run && \ mkdir -p /var/log && \ mkdir -p /usr/local/etc/php-fpm.d && \ - # Always create a clean, minimal config file (overwrite any from build stage) \ - # Note: daemonize = yes for test environment so subprocess.run() doesn't hang \ + mkdir -p /usr/local/etc/php/conf.d && \ + + ln -sf /usr/local/etc/php/conf.d /etc/php/${PHP_VER}/fpm/conf.d || true && \ + echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php${PHP_VER}-fpm.log" >> /usr/local/etc/php-fpm.conf && \ echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ echo "include=/etc/php/${PHP_VER}/fpm/pool.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ @@ -161,10 +121,18 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ - # Test the configuration \ - php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 || (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && exit 1) && \ - mkdir -p /etc/php-fpm.d && \ - ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf + + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 | grep -v "Nothing matches the include pattern" || true && \ + php-fpm -t -y /usr/local/etc/php-fpm.conf >/dev/null 2>&1 || \ + (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && \ + echo "Config file contents:" && cat 
/usr/local/etc/php-fpm.conf && \ + echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ + exit 1) && \ + + ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true # Configure MySQL socket path for mysqli (so "localhost" connections work) RUN mkdir -p /usr/local/etc/php/conf.d && \ diff --git a/tools/server_tests/nginx/main.py b/tools/server_tests/nginx/main.py index 591671b32..50aaa2952 100644 --- a/tools/server_tests/nginx/main.py +++ b/tools/server_tests/nginx/main.py @@ -182,12 +182,6 @@ def nginx_php_fpm_process_test(test_data): nginx_create_conf_file(test_data["test_name"], test_data["test_dir"], test_data["server_port"]) test_data["fpm_config"] = php_fpm_create_conf_file(test_data["test_dir"], test_data["test_name"], "root", test_data["env"]) - # Reload PHP-FPM to pick up the new pool config - result = subprocess.run(['pgrep', '-f', 'php-fpm'], capture_output=True, text=True) - if result.stdout.strip(): - php_fpm_pid = result.stdout.strip().split('\n')[0] - subprocess.run(['kill', '-USR2', php_fpm_pid], check=False) # Reload config - time.sleep(0.5) # Give it time to reload return test_data @@ -201,17 +195,9 @@ def nginx_php_fpm_pre_tests(): create_folder(f'{log_dir}/php-fpm') modify_nginx_conf(nginx_global_conf) subprocess.run(['nginx'], check=True) - # Start PHP-FPM in background and verify it's running - # Use /etc/php-fpm.conf if it exists (symlink to /usr/local/etc/php-fpm.conf), otherwise use /usr/local/etc/php-fpm.conf - php_fpm_config = "/etc/php-fpm.conf" if os.path.exists("/etc/php-fpm.conf") else "/usr/local/etc/php-fpm.conf" - subprocess.run([php_fpm_bin, '--allow-to-run-as-root', '-y', php_fpm_config], check=True) - time.sleep(2) - # Verify PHP-FPM is actually running - result = subprocess.run(['pgrep', '-f', 'php-fpm'], capture_output=True, text=True) - if not result.stdout.strip(): - raise RuntimeError("PHP-FPM failed to start!") + subprocess.run([php_fpm_bin, '--allow-to-run-as-root'], check=True) print("nginx and php-fpm servers restarted!") - time.sleep(3) + time.sleep(5) def nginx_php_fpm_start_server(test_data, test_lib_dir, valgrind): From 0ebb573d3ed181c38f4bd041a100a43e1a15d67f Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 2 Dec 2025 00:18:08 +0000 Subject: [PATCH 039/170] Add FrankenPHP support to environment handling and request processing --- lib/php-extension/Environment.cpp | 19 +++ lib/php-extension/RequestProcessor.cpp | 38 +++-- lib/php-extension/include/Environment.h | 4 + tests/server/test_disable/test.py | 3 +- tests/server/test_domains/test.py | 9 +- tools/run_server_tests.py | 49 ++++-- tools/server_tests/frankenphp_classic/main.py | 83 ++++++++++ tools/server_tests/frankenphp_worker/main.py | 147 ++++++++++++++++++ 8 files changed, 328 insertions(+), 24 deletions(-) create mode 100644 tools/server_tests/frankenphp_classic/main.py create mode 100644 tools/server_tests/frankenphp_worker/main.py diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 187b2884d..67f6f93ba 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -96,6 +96,23 @@ bool LoadLaravelEnvFile() { return true; } + +/* + FrankenPHP's Caddyfile env directive only populates $_SERVER, not the process environment. + This function reads environment variables from $_SERVER for FrankenPHP compatibility. 
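+    For example (illustrative only): a value set with the Caddyfile directive
+    env AIKIDO_TOKEN "..." inside a php_server block shows up in $_SERVER for each
+    request, while getenv("AIKIDO_TOKEN") stays empty.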
+*/ +std::string GetFrankenEnvVariable(const std::string& env_key) { + std::string env_value = AIKIDO_GLOBAL(server).GetVar(env_key.c_str()); + if (!env_value.empty()) { + if (env_key == "AIKIDO_TOKEN") { + AIKIDO_LOG_DEBUG("franken_env[%s] = %s\n", env_key.c_str(), AnonymizeToken(env_value).c_str()); + } else { + AIKIDO_LOG_DEBUG("franken_env[%s] = %s\n", env_key.c_str(), env_value.c_str()); + } + } + return env_value; +} + std::string GetLaravelEnvVariable(const std::string& env_key) { const auto& laravelEnv = AIKIDO_GLOBAL(laravelEnv); if (laravelEnv.find(env_key) != laravelEnv.end()) { @@ -113,12 +130,14 @@ std::string GetLaravelEnvVariable(const std::string& env_key) { Load env variables from the following sources (in this order): - System environment variables - PHP environment variables + - FrankenPHP environment variables - Laravel environment variables */ using EnvGetterFn = std::string(*)(const std::string&); EnvGetterFn envGetters[] = { &GetSystemEnvVariable, &GetPhpEnvVariable, + &GetFrankenEnvVariable, &GetLaravelEnvVariable }; diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp index b2d18e167..e6e686b7c 100644 --- a/lib/php-extension/RequestProcessor.cpp +++ b/lib/php-extension/RequestProcessor.cpp @@ -128,14 +128,14 @@ bool RequestProcessor::Init() { } std::string initDataString = this->GetInitData(); - if (AIKIDO_GLOBAL(disable) == true && AIKIDO_GLOBAL(sapi_name) != "apache2handler") { + if (AIKIDO_GLOBAL(disable) == true && AIKIDO_GLOBAL(sapi_name) != "apache2handler" && AIKIDO_GLOBAL(sapi_name) != "frankenphp") { /* - As you can set AIKIDO_DISABLE per site, in an apache-mod-php setup, as a process can serve multiple sites, + As you can set AIKIDO_DISABLE per site, in an apache-mod-php or frankenphp setup, as a process can serve multiple sites, we can't just not initialize the request processor, as it can be disabled for one site but not for another. When subsequent requests come in for the non-disabled sites, the request processor needs to be initialized. - For non-apache-mod-php SAPI, we can just not initialize the request processor if AIKIDO_DISABLE is set to 1. + For non-apache-mod-php/frankenphp SAPI, we can just not initialize the request processor if AIKIDO_DISABLE is set to 1. */ - AIKIDO_LOG_INFO("Request Processor initialization skipped because AIKIDO_DISABLE is set to 1 and SAPI is not apache2handler!\n"); + AIKIDO_LOG_INFO("Request Processor initialization skipped because AIKIDO_DISABLE is set to 1 and SAPI is not apache2handler or frankenphp!\n"); return false; } @@ -215,30 +215,35 @@ bool RequestProcessor::RequestInit() { AIKIDO_LOG_INFO("RequestProcessorInit called successfully\n"); } - + const auto& sapiName = AIKIDO_GLOBAL(sapi_name); - if (sapiName == "apache2handler") { - // Apache-mod-php can serve multiple sites per process + if (sapiName == "apache2handler" || sapiName == "frankenphp") { + // Apache-mod-php and FrankenPHP can serve multiple sites per process // We need to reload config each request to detect token changes + // Check disable BEFORE modifying any state (shared Go state or per-instance state) + // Use GetEnvBool() to read disable flag without modifying global state + if (GetEnvBool("AIKIDO_DISABLE", false)) { + AIKIDO_LOG_INFO("Request Processor initialization skipped because AIKIDO_DISABLE is set to 1!\n"); + return true; + } this->LoadConfigFromEnvironment(); } else { - // Server APIs that are not apache-mod-php (like php-fpm, cli-server, ...) 
+ // Server APIs that are not apache-mod-php/frankenphp (like php-fpm, cli-server, ...) // can only serve one site per process, so the config should be loaded at the first request. // If the token is not set at the first request, we try to reload it until we get a valid token. // The user can update .env file via zero downtime deployments after the PHP server is started. if (AIKIDO_GLOBAL(token) == "") { AIKIDO_LOG_INFO("Loading Aikido config until we get a valid token for SAPI: %s...\n", AIKIDO_GLOBAL(sapi_name).c_str()); + if (GetEnvBool("AIKIDO_DISABLE", false)) { + AIKIDO_LOG_INFO("Request Processor initialization skipped because AIKIDO_DISABLE is set to 1!\n"); + return true; + } this->LoadConfigFromEnvironment(); } } AIKIDO_LOG_DEBUG("RINIT started!\n"); - if (AIKIDO_GLOBAL(disable) == true) { - AIKIDO_LOG_INFO("Request Processor initialization skipped because AIKIDO_DISABLE is set to 1!\n"); - return true; - } - this->requestInitialized = true; this->numberOfRequests++; @@ -270,10 +275,17 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s } void RequestProcessor::LoadConfigFromEnvironment() { + // In ZTS mode (FrankenPHP), AIKIDO_GLOBAL(token) is thread-local, so each thread has its own copy + // We read the previous token before LoadEnvironment() updates it with the current request's token auto& globalToken = AIKIDO_GLOBAL(token); std::string previousToken = globalToken; + + // LoadEnvironment() reads from $_SERVER['AIKIDO_TOKEN'] (per-request in FrankenPHP) + // and updates AIKIDO_GLOBAL(token) with the current request's token LoadEnvironment(); + std::string currentToken = globalToken; + LoadConfig(previousToken, currentToken); } diff --git a/lib/php-extension/include/Environment.h b/lib/php-extension/include/Environment.h index 88ee812ea..2c0e0685f 100644 --- a/lib/php-extension/include/Environment.h +++ b/lib/php-extension/include/Environment.h @@ -5,3 +5,7 @@ void LoadEnvironment(); bool LoadLaravelEnvFile(); bool GetBoolFromString(const std::string& env, bool default_value); + +bool GetEnvBool(const std::string& env_key, bool default_value); + +std::string GetEnvString(const std::string& env_key, const std::string default_value); diff --git a/tests/server/test_disable/test.py b/tests/server/test_disable/test.py index ad695e158..bd523f20a 100755 --- a/tests/server/test_disable/test.py +++ b/tests/server/test_disable/test.py @@ -16,7 +16,8 @@ def run_test(): assert_response_code_is(response, 200) assert_response_body_contains(response, "File opened!") - if mock_server_get_platform_name() != "apache2handler": + platform = mock_server_get_platform_name() + if platform != "apache2handler" and platform != "frankenphp": events = mock_server_get_events() assert_events_length_is(events, 0) diff --git a/tests/server/test_domains/test.py b/tests/server/test_domains/test.py index 46e88e5d9..96319a531 100755 --- a/tests/server/test_domains/test.py +++ b/tests/server/test_domains/test.py @@ -18,7 +18,14 @@ def run_test(): events = mock_server_get_events() assert_events_length_is(events, 2) assert_started_event_is_valid(events[0]) - assert_event_contains_subset_file(events[1], "expect_domains.json") + + with open("expect_domains.json", 'r') as file: + expected = json.load(file) + + if mock_server_get_platform_name() == "frankenphp": + expected["hostnames"] = [h for h in expected["hostnames"] if h.get("hostname") != "127.0.0.1"] + + assert_event_contains_subset("__root", events[1], expected) if __name__ == "__main__": load_test_args() diff --git 
a/tools/run_server_tests.py b/tools/run_server_tests.py index 545a1abde..373608ad7 100755 --- a/tools/run_server_tests.py +++ b/tools/run_server_tests.py @@ -7,9 +7,12 @@ import json import argparse import socket +import urllib.request from server_tests.php_built_in.main import php_built_in_start_server from server_tests.apache.main import apache_mod_php_init, apache_mod_php_process_test, apache_mod_php_pre_tests, apache_mod_php_start_server, apache_mod_php_uninit from server_tests.nginx.main import nginx_php_fpm_init, nginx_php_fpm_process_test, nginx_php_fpm_pre_tests, nginx_php_fpm_start_server, nginx_php_fpm_uninit +from server_tests.frankenphp_classic.main import frankenphp_classic_init, frankenphp_classic_process_test, frankenphp_classic_pre_tests, frankenphp_classic_start_server, frankenphp_classic_uninit +from server_tests.frankenphp_worker.main import frankenphp_worker_init, frankenphp_worker_process_test, frankenphp_worker_pre_tests, frankenphp_worker_start_server, frankenphp_worker_uninit INIT = 0 PROCESS_TEST = 1 @@ -39,6 +42,20 @@ nginx_php_fpm_start_server, nginx_php_fpm_uninit ), + "frankenphp-classic": ( + frankenphp_classic_init, + frankenphp_classic_process_test, + frankenphp_classic_pre_tests, + frankenphp_classic_start_server, + frankenphp_classic_uninit + ), + "frankenphp-worker": ( + frankenphp_worker_init, + frankenphp_worker_process_test, + frankenphp_worker_pre_tests, + frankenphp_worker_start_server, + frankenphp_worker_uninit + ), } used_ports = set() @@ -46,6 +63,8 @@ failed_tests = [] lock = threading.Lock() +max_concurrent_tests = 69 +test_semaphore = threading.Semaphore(max_concurrent_tests) def is_port_in_active_use(port): with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: @@ -76,14 +95,24 @@ def print_test_results(s, tests): for t in tests: print(f"\t- {t}") +def handle_test_scenario_with_semaphore(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug): + test_semaphore.acquire() + try: + handle_test_scenario(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug) + finally: + test_semaphore.release() + def handle_test_scenario(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug): test_name = data["test_name"] mock_port = data["mock_port"] server_port = data["server_port"] + mock_aikido_core = None + server_process = None + test_process = None try: print(f"Running {test_name}...") print(f"Starting mock server on port {mock_port} with start_config.json for {test_name}...") - mock_aikido_core = subprocess.Popen(["python3", "mock_aikido_core.py", str(mock_port), data["config_path"]]) + mock_aikido_core = subprocess.Popen(["python3", "-u", "mock_aikido_core.py", str(mock_port), data["config_path"]]) time.sleep(5) print(f"Starting {server} server on port {server_port} for {test_name}...") @@ -105,16 +134,13 @@ def handle_test_scenario(data, root_tests_dir, test_lib_dir, server, benchmark, subprocess.run(["python3", test_script_name, str(server_port), str(mock_port), test_name], env=dict(os.environ, PYTHONPATH=f"{test_lib_dir}:$PYTHONPATH"), cwd=test_script_cwd, - check=True, timeout=600, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + check=True, timeout=600) passed_tests.append(test_name) except subprocess.CalledProcessError as e: print(f"Error in testing scenario {test_name}:") - print(f"Exception output: {e.output}") print(f"Test exit code: {e.returncode}") - print(f"Test stdout: {e.stdout.decode()}") - print(f"Test stderr: {e.stderr.decode()}") failed_tests.append(test_name) except 
subprocess.TimeoutExpired: @@ -172,12 +198,17 @@ def main(root_tests_dir, test_lib_dir, test_dirs, server="php-built-in", benchma tests_data.append(test_data) if servers[server][PRE_TESTS] is not None: - test_data = servers[server][PRE_TESTS]() - + pre_tests = servers[server][PRE_TESTS] + if server in ["frankenphp-classic", "frankenphp-worker"]: + pre_tests(tests_data) + else: + pre_tests() + threads = [] + target_func = handle_test_scenario_with_semaphore if server in ["frankenphp-classic", "frankenphp-worker"] else handle_test_scenario for test_data in tests_data: args = (test_data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug) - thread = threading.Thread(target=handle_test_scenario, args=args) + thread = threading.Thread(target=target_func, args=args) threads.append(thread) thread.start() time.sleep(10) @@ -197,7 +228,7 @@ def main(root_tests_dir, test_lib_dir, test_dirs, server="php-built-in", benchma parser.add_argument("--benchmark", action="store_true", help="Enable benchmarking.") parser.add_argument("--valgrind", action="store_true", help="Enable valgrind.") parser.add_argument("--debug", action="store_true", help="Enable debugging logs.") - parser.add_argument("--server", type=str, choices=["php-built-in", "apache-mod-php", "nginx-php-fpm"], default="php-built-in", help="Select the type of server testing.") + parser.add_argument("--server", type=str, choices=["php-built-in", "apache-mod-php", "nginx-php-fpm", "frankenphp-classic", "frankenphp-worker"], default="php-built-in", help="Select the type of server testing.") parser.add_argument("--max-tests", type=int, default=0, help="Maximum number of tests to execute.") parser.add_argument("--max-runs", type=int, default=1, help="Maximum number of test runs.") diff --git a/tools/server_tests/frankenphp_classic/main.py b/tools/server_tests/frankenphp_classic/main.py new file mode 100644 index 000000000..13e366a4b --- /dev/null +++ b/tools/server_tests/frankenphp_classic/main.py @@ -0,0 +1,83 @@ +import os +import subprocess +import time +import urllib.request + +frankenphp_bin = "frankenphp" +caddyfile_path = "/tmp/frankenphp_test.caddyfile" +log_dir = "/var/log/frankenphp" + +caddyfile_base_template = """{{ + frankenphp {{ + num_threads {num_threads} + max_threads {max_threads} + }} +}} +""" + +site_block_template = """http://:{port} {{ + root * {test_dir} + php_server {{ +{env_vars} + }} +}} +""" + +def create_folder(folder_path): + if not os.path.exists(folder_path): + os.makedirs(folder_path) + +def frankenphp_create_site_block(test_data): + env_vars = "" + for key, value in test_data["env"].items(): + env_vars += f" env {key} \"{value}\"\n" + + return site_block_template.format( + port=test_data["server_port"], + test_dir=test_data["test_dir"], + env_vars=env_vars + ) + +def frankenphp_classic_init(tests_dir): + if os.path.exists(caddyfile_path): + os.remove(caddyfile_path) + create_folder(log_dir) + create_folder('/etc/frankenphp/php.d') + +def frankenphp_classic_process_test(test_data): + test_data["site_block"] = frankenphp_create_site_block(test_data) + return test_data + +def frankenphp_classic_pre_tests(tests_data): + subprocess.run(['pkill', '-9', '-x', 'frankenphp'], stderr=subprocess.DEVNULL) + subprocess.run(['pkill', '-9', '-f', 'mock_aikido_core'], stderr=subprocess.DEVNULL) + subprocess.run(['rm', '-rf', f'{log_dir}/*']) + subprocess.run(['rm', '-rf', f'/var/log/aikido-*/*']) + + total_workers = len(tests_data) + threads = total_workers * 2 + + with open(caddyfile_path, 'w') as f: + 
f.write(caddyfile_base_template.format(num_threads=threads, max_threads=threads)) + for test_data in tests_data: + f.write("\n" + test_data["site_block"]) + + subprocess.Popen( + [frankenphp_bin, 'run', '--config', caddyfile_path], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL + ) + time.sleep(20) + + result = subprocess.run(['pgrep', '-x', 'frankenphp'], capture_output=True, text=True) + if not result.stdout.strip(): + raise RuntimeError("FrankenPHP classic failed to start!") + +def frankenphp_classic_start_server(test_data, test_lib_dir, valgrind): + return None + +def frankenphp_classic_uninit(): + subprocess.run(['pkill', '-9', '-x', 'frankenphp'], stderr=subprocess.DEVNULL) + if os.path.exists(caddyfile_path): + os.remove(caddyfile_path) + diff --git a/tools/server_tests/frankenphp_worker/main.py b/tools/server_tests/frankenphp_worker/main.py new file mode 100644 index 000000000..c7306707a --- /dev/null +++ b/tools/server_tests/frankenphp_worker/main.py @@ -0,0 +1,147 @@ +import os +import subprocess +import time +import urllib.request + +frankenphp_bin = "frankenphp" +caddyfile_path = "/tmp/frankenphp_worker_test.caddyfile" +log_dir = "/var/log/frankenphp" +worker_scripts_dir = "/tmp/frankenphp_workers" + +num_workers = 1 + +caddyfile_base_template = """{{ + frankenphp {{ + num_threads {num_threads} + max_threads {max_threads} + }} +}} +""" + +site_block_template = """http://:{port} {{ + root * {test_dir} + php_server {{ +{env_vars} + worker {{ + file {worker_script} + num {num_workers} + }} + }} +}} +""" + +worker_script_template = """ 0; $nbWorkers = frankenphp_handle_request($handler)) {{ + gc_collect_cycles(); +}} +""" + +def create_folder(folder_path): + if not os.path.exists(folder_path): + os.makedirs(folder_path) + +def frankenphp_worker_create_script(test_dir, test_name): + worker_script_path = os.path.join(worker_scripts_dir, f"{test_name}.php") + worker_script_content = worker_script_template.format(test_dir=test_dir) + + with open(worker_script_path, 'w') as f: + f.write(worker_script_content) + + return worker_script_path + +def frankenphp_worker_create_site_block(test_data, worker_script_path): + env_vars = f" env DOCUMENT_ROOT \"{test_data['test_dir']}\"\n" + for key, value in test_data["env"].items(): + env_vars += f" env {key} \"{value}\"\n" + + return site_block_template.format( + port=test_data["server_port"], + test_dir=test_data["test_dir"], + worker_script=worker_script_path, + env_vars=env_vars, + num_workers=num_workers + ) + +def frankenphp_worker_init(tests_dir): + if os.path.exists(caddyfile_path): + os.remove(caddyfile_path) + subprocess.run(['rm', '-rf', f'{worker_scripts_dir}/*']) + create_folder(log_dir) + create_folder('/etc/frankenphp/php.d') + create_folder(worker_scripts_dir) + +def frankenphp_worker_process_test(test_data): + test_name = test_data["test_name"] + worker_script_path = frankenphp_worker_create_script(test_data["test_dir"], test_name) + test_data["site_block"] = frankenphp_worker_create_site_block(test_data, worker_script_path) + return test_data + +def frankenphp_worker_pre_tests(tests_data): + subprocess.run(['pkill', '-9', '-x', 'frankenphp'], stderr=subprocess.DEVNULL) + subprocess.run(['pkill', '-9', '-f', 'mock_aikido_core'], stderr=subprocess.DEVNULL) + subprocess.run(['rm', '-rf', f'{log_dir}/*']) + subprocess.run(['rm', '-rf', f'/var/log/aikido-*/*']) + subprocess.run(['rm', '-rf', f'{worker_scripts_dir}/*']) + + total_workers = len(tests_data) + threads = total_workers * 2 + + with open(caddyfile_path, 'w') as f: + 
f.write(caddyfile_base_template.format(num_threads=threads, max_threads=threads)) + for test_data in tests_data: + f.write("\n" + test_data["site_block"]) + + process = subprocess.Popen( + [frankenphp_bin, 'run', '--config', caddyfile_path], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL + ) + time.sleep(20) + + result = subprocess.run(['pgrep', '-x', 'frankenphp'], capture_output=True, text=True) + if not result.stdout.strip(): + raise RuntimeError("FrankenPHP worker failed to start!") + + for i in range(30): + try: + urllib.request.urlopen('http://localhost:2019/config', timeout=1) + break + except: + time.sleep(1) + else: + raise RuntimeError("FrankenPHP admin API not ready!") + + print(f"FrankenPHP worker started with {threads} threads for {len(tests_data)} tests") + +def frankenphp_worker_start_server(test_data, test_lib_dir, valgrind): + return None + +def frankenphp_worker_uninit(): + subprocess.run(['pkill', '-9', '-x', 'frankenphp'], stderr=subprocess.DEVNULL) + if os.path.exists(caddyfile_path): + os.remove(caddyfile_path) + subprocess.run(['rm', '-rf', f'{worker_scripts_dir}/*']) + From 015ccb50068b950c7455f2a344aea9cca5cb92e2 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 9 Dec 2025 12:58:59 +0000 Subject: [PATCH 040/170] Enhance request processing and context handling by integrating instance management across various components. Update environment variable retrieval, logging, and security checks to utilize instance context, improving thread safety and consistency in handling requests. Refactor related tests to align with new instance-based methods. --- lib/php-extension/Environment.cpp | 5 + lib/php-extension/RequestProcessor.cpp | 6 +- lib/php-extension/include/Utils.h | 2 +- lib/request-processor/aikido_types/handle.go | 3 +- .../api_discovery/getApiAuthType.go | 13 +- .../api_discovery/getApiAuthType_test.go | 40 +-- .../api_discovery/getApiInfo.go | 17 +- lib/request-processor/attack/attack.go | 41 ++-- lib/request-processor/config/config.go | 7 +- lib/request-processor/context/cache.go | 213 +++++++++------- .../context/context_for_unit_tests.go | 39 ++- lib/request-processor/context/data_sources.go | 4 +- .../context/event_context.go | 35 ++- .../context/event_getters.go | 52 ++-- .../context/request_context.go | 232 +++++++++--------- lib/request-processor/globals/globals.go | 72 +++++- lib/request-processor/grpc/client.go | 83 ++++--- lib/request-processor/grpc/config.go | 8 +- .../handle_blocking_request.go | 73 +++--- .../handle_path_traversal.go | 18 +- .../handle_rate_limit_group_event.go | 6 +- .../handle_request_metadata.go | 40 ++- .../handle_shell_execution.go | 14 +- lib/request-processor/handle_sql_queries.go | 14 +- lib/request-processor/handle_urls.go | 45 ++-- lib/request-processor/handle_user_event.go | 11 +- .../helpers/resolveHostname.go | 5 +- lib/request-processor/instance/wrapper.go | 38 ++- lib/request-processor/log/log.go | 215 +++++++++------- lib/request-processor/main.go | 41 ++-- lib/request-processor/utils/utils.go | 27 +- lib/request-processor/utils/utils_test.go | 16 +- .../checkContextForPathTraversal.go | 5 +- .../checkContextForPathTraversal_test.go | 22 +- .../checkContextForShellInjection.go | 5 +- .../checkContextForShellInjection_test.go | 8 +- .../checkContextForSqlInjection.go | 5 +- .../ssrf/checkContextForSSRF.go | 27 +- .../ssrf/getResolvedIpStatus.go | 9 +- .../ssrf/getResolvedIpStatus_test.go | 2 +- .../vulnerabilities/ssrf/isRequestToItself.go | 14 +- .../ssrf/isRequestToItself_test.go | 48 ++-- 
.../zen-internals/zen_internals.go | 8 +- package/rpm/aikido.spec | 120 ++++----- tools/mock_aikido_core.py | 2 +- tools/run_server_tests.py | 18 +- tools/server_tests/frankenphp_classic/main.py | 4 +- tools/server_tests/frankenphp_worker/main.py | 8 +- 48 files changed, 979 insertions(+), 761 deletions(-) diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 67f6f93ba..47bbb9465 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -102,6 +102,11 @@ bool LoadLaravelEnvFile() { This function reads environment variables from $_SERVER for FrankenPHP compatibility. */ std::string GetFrankenEnvVariable(const std::string& env_key) { + if (Z_TYPE(PG(http_globals)[TRACK_VARS_SERVER]) != IS_ARRAY) { + AIKIDO_LOG_DEBUG("franken_env[%s] = (empty - $_SERVER not an array)\n", env_key.c_str()); + return ""; + } + std::string env_value = AIKIDO_GLOBAL(server).GetVar(env_key.c_str()); if (!env_value.empty()) { if (env_key == "AIKIDO_TOKEN") { diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp index e6e686b7c..0d5c8d8e1 100644 --- a/lib/php-extension/RequestProcessor.cpp +++ b/lib/php-extension/RequestProcessor.cpp @@ -275,13 +275,9 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s } void RequestProcessor::LoadConfigFromEnvironment() { - // In ZTS mode (FrankenPHP), AIKIDO_GLOBAL(token) is thread-local, so each thread has its own copy - // We read the previous token before LoadEnvironment() updates it with the current request's token auto& globalToken = AIKIDO_GLOBAL(token); std::string previousToken = globalToken; - - // LoadEnvironment() reads from $_SERVER['AIKIDO_TOKEN'] (per-request in FrankenPHP) - // and updates AIKIDO_GLOBAL(token) with the current request's token + LoadEnvironment(); std::string currentToken = globalToken; diff --git a/lib/php-extension/include/Utils.h b/lib/php-extension/include/Utils.h index 19bef5387..ffdf98ff2 100644 --- a/lib/php-extension/include/Utils.h +++ b/lib/php-extension/include/Utils.h @@ -12,7 +12,7 @@ std::string GetTime(); std::string GetDateTime(); -uint64_t GetThreadID(); // Returns 0 for NTS, pthread_self() for ZTS +uint64_t GetThreadID(); const char* GetEventName(EVENT_ID event); diff --git a/lib/request-processor/aikido_types/handle.go b/lib/request-processor/aikido_types/handle.go index 63130364c..083a4dd9a 100644 --- a/lib/request-processor/aikido_types/handle.go +++ b/lib/request-processor/aikido_types/handle.go @@ -8,7 +8,8 @@ type Method struct { } type RequestShutdownParams struct { - Server *ServerData + ThreadID uint64 + Token string Method string Route string RouteParsed string diff --git a/lib/request-processor/api_discovery/getApiAuthType.go b/lib/request-processor/api_discovery/getApiAuthType.go index f95ba514b..8f55e82d6 100644 --- a/lib/request-processor/api_discovery/getApiAuthType.go +++ b/lib/request-processor/api_discovery/getApiAuthType.go @@ -2,6 +2,7 @@ package api_discovery import ( "main/context" + "main/instance" "main/ipc/protos" "slices" "strings" @@ -30,10 +31,10 @@ var commonAuthCookieNames = append([]string{ // GetApiAuthType returns the authentication type of the API request. // Returns nil if the authentication type could not be determined. 
-func GetApiAuthType() []*protos.APIAuthType { +func GetApiAuthType(inst *instance.RequestProcessorInstance) []*protos.APIAuthType { var result []*protos.APIAuthType - headers := context.GetHeadersParsed() + headers := context.GetHeadersParsed(inst) // Check the Authorization header authHeader, authHeaderExists := headers["authorization"].(string) @@ -44,7 +45,7 @@ func GetApiAuthType() []*protos.APIAuthType { } } - result = append(result, findApiKeys()...) + result = append(result, findApiKeys(inst)...) return result } @@ -81,11 +82,11 @@ func getPhpHttpHeaderEquivalent(apiKey string) string { } // findApiKeys searches for API keys in headers and cookies. -func findApiKeys() []*protos.APIAuthType { +func findApiKeys(inst *instance.RequestProcessorInstance) []*protos.APIAuthType { var result []*protos.APIAuthType - headers := context.GetHeadersParsed() - cookies := context.GetCookiesParsed() + headers := context.GetHeadersParsed(inst) + cookies := context.GetCookiesParsed(inst) for header_index, header := range commonApiKeyHeaderNames { if value, exists := headers[getPhpHttpHeaderEquivalent(header)]; exists && value != "" { result = append(result, &protos.APIAuthType{ diff --git a/lib/request-processor/api_discovery/getApiAuthType_test.go b/lib/request-processor/api_discovery/getApiAuthType_test.go index 6e9c7f304..1a736ba98 100644 --- a/lib/request-processor/api_discovery/getApiAuthType_test.go +++ b/lib/request-processor/api_discovery/getApiAuthType_test.go @@ -12,34 +12,34 @@ import ( func TestDetectAuthorizationHeader(t *testing.T) { assert := assert.New(t) - context.LoadForUnitTests(map[string]string{ + inst := context.LoadForUnitTests(map[string]string{ "headers": context.GetJsonString(map[string]interface{}{ "authorization": "Bearer token", }), }) assert.Equal([]*protos.APIAuthType{ {Type: "http", Scheme: "bearer"}, - }, GetApiAuthType()) + }, GetApiAuthType(inst)) context.UnloadForUnitTests() - context.LoadForUnitTests(map[string]string{ + inst = context.LoadForUnitTests(map[string]string{ "headers": context.GetJsonString(map[string]interface{}{ "authorization": "Basic base64", }), }) assert.Equal([]*protos.APIAuthType{ {Type: "http", Scheme: "basic"}, - }, GetApiAuthType()) + }, GetApiAuthType(inst)) context.UnloadForUnitTests() - context.LoadForUnitTests(map[string]string{ + inst = context.LoadForUnitTests(map[string]string{ "headers": context.GetJsonString(map[string]interface{}{ "authorization": "custom", }), }) assert.Equal([]*protos.APIAuthType{ {Type: "apiKey", In: "header", Name: "Authorization"}, - }, GetApiAuthType()) + }, GetApiAuthType(inst)) context.UnloadForUnitTests() } @@ -47,24 +47,24 @@ func TestDetectAuthorizationHeader(t *testing.T) { func TestDetectApiKeys(t *testing.T) { assert := assert.New(t) - context.LoadForUnitTests(map[string]string{ + inst := context.LoadForUnitTests(map[string]string{ "headers": context.GetJsonString(map[string]interface{}{ "x_api_key": "token", }), }) assert.Equal([]*protos.APIAuthType{ {Type: "apiKey", In: ("header"), Name: ("x-api-key")}, - }, GetApiAuthType()) + }, GetApiAuthType(inst)) context.UnloadForUnitTests() - context.LoadForUnitTests(map[string]string{ + inst = context.LoadForUnitTests(map[string]string{ "headers": context.GetJsonString(map[string]interface{}{ "api_key": "token", }), }) assert.Equal([]*protos.APIAuthType{ {Type: "apiKey", In: ("header"), Name: ("api-key")}, - }, GetApiAuthType()) + }, GetApiAuthType(inst)) context.UnloadForUnitTests() } @@ -72,20 +72,20 @@ func TestDetectApiKeys(t *testing.T) { func 
TestDetectAuthCookies(t *testing.T) { assert := assert.New(t) - context.LoadForUnitTests(map[string]string{ + inst := context.LoadForUnitTests(map[string]string{ "cookies": "api-key=token", }) assert.Equal([]*protos.APIAuthType{ {Type: "apiKey", In: ("cookie"), Name: ("api-key")}, - }, GetApiAuthType()) + }, GetApiAuthType(inst)) context.UnloadForUnitTests() - context.LoadForUnitTests(map[string]string{ + inst = context.LoadForUnitTests(map[string]string{ "cookies": "session=test", }) assert.Equal([]*protos.APIAuthType{ {Type: "apiKey", In: ("cookie"), Name: ("session")}, - }, GetApiAuthType()) + }, GetApiAuthType(inst)) context.UnloadForUnitTests() } @@ -93,21 +93,21 @@ func TestDetectAuthCookies(t *testing.T) { func TestNoAuth(t *testing.T) { assert := assert.New(t) - context.LoadForUnitTests(map[string]string{}) - assert.Empty(GetApiAuthType()) + inst := context.LoadForUnitTests(map[string]string{}) + assert.Empty(GetApiAuthType(inst)) context.UnloadForUnitTests() - context.LoadForUnitTests(map[string]string{ + inst = context.LoadForUnitTests(map[string]string{ "headers": context.GetJsonString(map[string]interface{}{}), }) - assert.Empty(GetApiAuthType()) + assert.Empty(GetApiAuthType(inst)) context.UnloadForUnitTests() - context.LoadForUnitTests(map[string]string{ + inst = context.LoadForUnitTests(map[string]string{ "headers": context.GetJsonString(map[string]interface{}{ "authorization": "", }), }) - assert.Empty(GetApiAuthType()) + assert.Empty(GetApiAuthType(inst)) context.UnloadForUnitTests() } diff --git a/lib/request-processor/api_discovery/getApiInfo.go b/lib/request-processor/api_discovery/getApiInfo.go index f308426ea..8f40bc9db 100644 --- a/lib/request-processor/api_discovery/getApiInfo.go +++ b/lib/request-processor/api_discovery/getApiInfo.go @@ -3,29 +3,30 @@ package api_discovery import ( . 
"main/aikido_types" "main/context" + "main/instance" "main/ipc/protos" "main/log" "reflect" ) -func GetApiInfo(server *ServerData) *protos.APISpec { +func GetApiInfo(inst *instance.RequestProcessorInstance, server *ServerData) *protos.APISpec { if !server.AikidoConfig.CollectApiSchema { - log.Debug("AIKIDO_FEATURE_COLLECT_API_SCHEMA is not enabled -> no API schema!") + log.Debug(inst, "AIKIDO_FEATURE_COLLECT_API_SCHEMA is not enabled -> no API schema!") return nil } var bodyInfo *protos.APIBodyInfo var queryInfo *protos.DataSchema - body := context.GetBodyParsed() - headers := context.GetHeadersParsed() - query := context.GetQueryParsed() + body := context.GetBodyParsed(inst) + headers := context.GetHeadersParsed(inst) + query := context.GetQueryParsed(inst) // Check body data if body != nil && isObject(body) && len(body) > 0 { bodyType := getBodyDataType(headers) if bodyType == Undefined { - log.Debug("Body type is undefined -> no API schema!") + log.Debug(inst, "Body type is undefined -> no API schema!") return nil } @@ -43,10 +44,10 @@ func GetApiInfo(server *ServerData) *protos.APISpec { } // Get Auth Info - authInfo := GetApiAuthType() + authInfo := GetApiAuthType(inst) if bodyInfo == nil && queryInfo == nil && authInfo == nil { - log.Debug("All sub-schemas are empty -> no API schema!") + log.Debug(inst, "All sub-schemas are empty -> no API schema!") return nil } diff --git a/lib/request-processor/attack/attack.go b/lib/request-processor/attack/attack.go index 3be894dae..83871dcfc 100644 --- a/lib/request-processor/attack/attack.go +++ b/lib/request-processor/attack/attack.go @@ -6,6 +6,7 @@ import ( "html" "main/context" "main/grpc" + "main/instance" "main/ipc/protos" "main/utils" ) @@ -20,9 +21,9 @@ func GetMetadataProto(metadata map[string]string) []*protos.Metadata { } /* Convert headers map to protobuf structure to be sent via gRPC to the Agent */ -func GetHeadersProto() []*protos.Header { +func GetHeadersProto(inst *instance.RequestProcessorInstance) []*protos.Header { var headersProto []*protos.Header - for key, value := range context.GetHeadersParsed() { + for key, value := range context.GetHeadersParsed(inst) { valueStr, ok := value.(string) if ok { headersProto = append(headersProto, &protos.Header{Key: key, Value: valueStr}) @@ -32,31 +33,35 @@ func GetHeadersProto() []*protos.Header { } /* Construct the AttackDetected protobuf structure to be sent via gRPC to the Agent */ -func GetAttackDetectedProto(res utils.InterceptorResult) *protos.AttackDetected { +func GetAttackDetectedProto(res utils.InterceptorResult, inst *instance.RequestProcessorInstance) *protos.AttackDetected { + token := inst.GetCurrentToken() + server := inst.GetCurrentServer() + + serverPID := context.GetServerPID() return &protos.AttackDetected{ - Token: context.GetCurrentServerToken(), - ServerPid: context.GetServerPID(), + Token: token, + ServerPid: serverPID, Request: &protos.Request{ - Method: context.GetMethod(), - IpAddress: context.GetIp(), - UserAgent: context.GetUserAgent(), - Url: context.GetUrl(), - Headers: GetHeadersProto(), - Body: context.GetBodyRaw(), + Method: context.GetMethod(inst), + IpAddress: context.GetIp(inst), + UserAgent: context.GetUserAgent(inst), + Url: context.GetUrl(inst), + Headers: GetHeadersProto(inst), + Body: context.GetBodyRaw(inst), Source: "php", - Route: context.GetRoute(), + Route: context.GetRoute(inst), }, Attack: &protos.Attack{ Kind: string(res.Kind), Operation: res.Operation, - Module: context.GetModule(), - Blocked: 
utils.IsBlockingEnabled(context.GetCurrentServer()), + Module: context.GetModule(inst), + Blocked: utils.IsBlockingEnabled(server), Source: res.Source, Path: res.PathToPayload, - Stack: context.GetStackTrace(), + Stack: context.GetStackTrace(inst), Payload: res.Payload, Metadata: GetMetadataProto(res.Metadata), - UserId: context.GetUserId(), + UserId: context.GetUserId(inst), }, } } @@ -86,11 +91,11 @@ func GetAttackDetectedAction(result utils.InterceptorResult) string { return GetThrowAction(BuildAttackDetectedMessage(result), 500) } -func ReportAttackDetected(res *utils.InterceptorResult) string { +func ReportAttackDetected(res *utils.InterceptorResult, inst *instance.RequestProcessorInstance) string { if res == nil { return "" } - grpc.OnAttackDetected(GetAttackDetectedProto(*res)) + grpc.OnAttackDetected(inst, GetAttackDetectedProto(*res, inst)) return GetAttackDetectedAction(*res) } diff --git a/lib/request-processor/config/config.go b/lib/request-processor/config/config.go index db52cc882..55c3a4d07 100644 --- a/lib/request-processor/config/config.go +++ b/lib/request-processor/config/config.go @@ -14,7 +14,7 @@ import ( func UpdateToken(inst *instance.RequestProcessorInstance, token string) bool { server := globals.GetServer(token) if server == nil { - log.Debugf("Server not found for token \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(token)) + log.Debugf(inst, "Server not found for token \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(token)) return false } @@ -22,12 +22,13 @@ func UpdateToken(inst *instance.RequestProcessorInstance, token string) bool { if inst.GetCurrentServer() == nil { inst.SetCurrentServer(server) } - log.Debugf("Token is the same as previous one, skipping config reload...") + log.Debugf(inst, "Token is the same as previous one, skipping config reload...") return false } + inst.SetCurrentToken(token) inst.SetCurrentServer(server) - log.Infof("Token changed to \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(token)) + log.Infof(inst, "Token changed to \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(token)) return true } diff --git a/lib/request-processor/context/cache.go b/lib/request-processor/context/cache.go index 6ff6014c7..9c0befb4d 100644 --- a/lib/request-processor/context/cache.go +++ b/lib/request-processor/context/cache.go @@ -4,6 +4,7 @@ package context import "C" import ( "main/helpers" + "main/instance" "main/log" "main/utils" "strconv" @@ -19,16 +20,17 @@ import ( type ParseFunction func(string) map[string]interface{} -func ContextSetMap(contextId int, rawDataPtr **string, parsedPtr **map[string]interface{}, stringsPtr **map[string]string, parseFunc ParseFunction) { +func ContextSetMap(inst *instance.RequestProcessorInstance, contextId int, rawDataPtr **string, parsedPtr **map[string]interface{}, stringsPtr **map[string]string, parseFunc ParseFunction) { if stringsPtr != nil && *stringsPtr != nil { return } - if Context.Callback == nil { + c := GetContext(inst) + if c.Callback == nil { return } - contextData := Context.Callback(contextId) + contextData := c.Callback(inst, contextId) if rawDataPtr != nil { *rawDataPtr = &contextData } @@ -42,224 +44,256 @@ func ContextSetMap(contextId int, rawDataPtr **string, parsedPtr **map[string]in } } -func ContextSetString(context_id int, m **string) { +func ContextSetString(inst *instance.RequestProcessorInstance, context_id int, m **string) { if *m != nil { return } - if Context.Callback == nil { + c := GetContext(inst) + if c.Callback == nil { return } - temp := Context.Callback(context_id) + temp := 
c.Callback(inst, context_id) *m = &temp } -func ContextSetBody() { - ContextSetMap(C.CONTEXT_BODY, &Context.BodyRaw, &Context.BodyParsed, &Context.BodyParsedFlattened, utils.ParseBody) +func ContextSetBody(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetMap(inst, C.CONTEXT_BODY, &c.BodyRaw, &c.BodyParsed, &c.BodyParsedFlattened, utils.ParseBody) } -func ContextSetQuery() { - ContextSetMap(C.CONTEXT_QUERY, nil, &Context.QueryParsed, &Context.QueryParsedFlattened, utils.ParseQuery) +func ContextSetQuery(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetMap(inst, C.CONTEXT_QUERY, nil, &c.QueryParsed, &c.QueryParsedFlattened, utils.ParseQuery) } -func ContextSetCookies() { - ContextSetMap(C.CONTEXT_COOKIES, nil, &Context.CookiesParsed, &Context.CookiesParsedFlattened, utils.ParseCookies) +func ContextSetCookies(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetMap(inst, C.CONTEXT_COOKIES, nil, &c.CookiesParsed, &c.CookiesParsedFlattened, utils.ParseCookies) } -func ContextSetHeaders() { - ContextSetMap(C.CONTEXT_HEADERS, nil, &Context.HeadersParsed, &Context.HeadersParsedFlattened, utils.ParseHeaders) +func ContextSetHeaders(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetMap(inst, C.CONTEXT_HEADERS, nil, &c.HeadersParsed, &c.HeadersParsedFlattened, utils.ParseHeaders) } -func ContextSetRouteParams() { - ContextSetMap(C.CONTEXT_ROUTE, &Context.RouteParamsRaw, &Context.RouteParamsParsed, &Context.RouteParamsParsedFlattened, utils.ParseRouteParams) +func ContextSetRouteParams(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetMap(inst, C.CONTEXT_ROUTE, &c.RouteParamsRaw, &c.RouteParamsParsed, &c.RouteParamsParsedFlattened, utils.ParseRouteParams) } -func ContextSetStatusCode() { - if Context.StatusCode != nil { +func ContextSetStatusCode(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.StatusCode != nil { return } - if Context.Callback == nil { + if c.Callback == nil { return } - status_code_str := Context.Callback(C.CONTEXT_STATUS_CODE) + status_code_str := c.Callback(inst, C.CONTEXT_STATUS_CODE) status_code, err := strconv.Atoi(status_code_str) if err != nil { - log.Warnf("Error parsing status code %v: %v", status_code_str, err) + log.Warnf(inst, "Error parsing status code %v: %v", status_code_str, err) return } - Context.StatusCode = &status_code + c.StatusCode = &status_code } -func ContextSetRoute() { - ContextSetString(C.CONTEXT_ROUTE, &Context.Route) +func ContextSetRoute(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetString(inst, C.CONTEXT_ROUTE, &c.Route) } -func ContextSetParsedRoute() { - parsedRoute := utils.BuildRouteFromURL(GetRoute()) - Context.RouteParsed = &parsedRoute +func ContextSetParsedRoute(inst *instance.RequestProcessorInstance) { + parsedRoute := utils.BuildRouteFromURL(GetRoute(inst)) + c := GetContext(inst) + c.RouteParsed = &parsedRoute } -func ContextSetMethod() { - ContextSetString(C.CONTEXT_METHOD, &Context.Method) +func ContextSetMethod(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetString(inst, C.CONTEXT_METHOD, &c.Method) } -func ContextSetUrl() { - ContextSetString(C.CONTEXT_URL, &Context.URL) +func ContextSetUrl(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetString(inst, C.CONTEXT_URL, &c.URL) } -func ContextSetUserAgent() { - ContextSetString(C.CONTEXT_HEADER_USER_AGENT, &Context.UserAgent) 
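
The cache.go hunks above and below all apply the same mechanical change: each ContextSet* helper now receives the *instance.RequestProcessorInstance, resolves that thread's RequestContextData through GetContext(inst), and only fills fields that are still nil by asking the PHP layer through the stored callback. A minimal standalone sketch of that lazy, per-instance caching pattern (hypothetical names and context ID, not the extension's real API) is:

// Standalone sketch of the lazy per-instance caching used by the ContextSet*
// helpers. processorInstance, requestContext and contextHeaderUserAgent are
// placeholders; the real IDs come from API.h and the storage lives in globals.
package main

import "fmt"

type processorInstance struct{ threadID uint64 }

type requestContext struct {
	userAgent *string
	callback  func(inst *processorInstance, contextID int) string
}

var perThread = map[uint64]*requestContext{} // the extension uses a sync.Map keyed by pthread ID

func getContext(inst *processorInstance) *requestContext { return perThread[inst.threadID] }

const contextHeaderUserAgent = 1 // hypothetical ID

func contextSetUserAgent(inst *processorInstance) {
	c := getContext(inst)
	if c == nil || c.userAgent != nil || c.callback == nil {
		return // already cached for this request, or no PHP callback to ask
	}
	value := c.callback(inst, contextHeaderUserAgent)
	c.userAgent = &value
}

func main() {
	perThread[1] = &requestContext{callback: func(*processorInstance, int) string { return "curl/8.0" }}
	inst := &processorInstance{threadID: 1}
	contextSetUserAgent(inst)
	fmt.Println(*perThread[1].userAgent) // fetched from PHP once, then served from the cache
}
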
+func ContextSetUserAgent(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetString(inst, C.CONTEXT_HEADER_USER_AGENT, &c.UserAgent) } -func ContextSetIp() { - if Context.IP != nil { +func ContextSetIp(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.IP != nil { return } - if Context.Callback == nil { + if c.Callback == nil { return } - remoteAddress := Context.Callback(C.CONTEXT_REMOTE_ADDRESS) - xForwardedFor := Context.Callback(C.CONTEXT_HEADER_X_FORWARDED_FOR) - ip := utils.GetIpFromRequest(GetCurrentServer(), remoteAddress, xForwardedFor) - Context.IP = &ip + remoteAddress := c.Callback(inst, C.CONTEXT_REMOTE_ADDRESS) + xForwardedFor := c.Callback(inst, C.CONTEXT_HEADER_X_FORWARDED_FOR) + + server := c.inst.GetCurrentServer() + ip := utils.GetIpFromRequest(server, remoteAddress, xForwardedFor) + c.IP = &ip } -func ContextSetIsIpBypassed() { - if Context.IsIpBypassed != nil { +func ContextSetIsIpBypassed(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.IsIpBypassed != nil { return } - isIpBypassed := utils.IsIpBypassed(GetCurrentServer(), GetIp()) - Context.IsIpBypassed = &isIpBypassed + server := c.inst.GetCurrentServer() + isIpBypassed := utils.IsIpBypassed(inst, server, GetIp(inst)) + c.IsIpBypassed = &isIpBypassed } -func ContextSetUserId() { - ContextSetString(C.CONTEXT_USER_ID, &Context.UserId) +func ContextSetUserId(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetString(inst, C.CONTEXT_USER_ID, &c.UserId) } -func ContextSetUserName() { - ContextSetString(C.CONTEXT_USER_NAME, &Context.UserName) +func ContextSetUserName(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + ContextSetString(inst, C.CONTEXT_USER_NAME, &c.UserName) } -func ContextSetRateLimitGroup() { - if Context.RateLimitGroup != nil { +func ContextSetRateLimitGroup(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.RateLimitGroup != nil { return } - if Context.Callback == nil { + if c.Callback == nil { return } - rateLimitGroup := Context.Callback(C.CONTEXT_RATE_LIMIT_GROUP) - Context.RateLimitGroup = &rateLimitGroup + rateLimitGroup := c.Callback(inst, C.CONTEXT_RATE_LIMIT_GROUP) + c.RateLimitGroup = &rateLimitGroup } -func ContextSetEndpointConfig() { - if Context.EndpointConfig != nil { +func ContextSetEndpointConfig(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.EndpointConfig != nil { return } - if GetCurrentServer() == nil { + // Per-thread isolation via sync.Map prevents context bleeding + server := c.inst.GetCurrentServer() + if server == nil { return } - endpointConfig := utils.GetEndpointConfig(GetCurrentServer(), GetMethod(), GetParsedRoute()) - Context.EndpointConfig = &endpointConfig + method := GetMethod(inst) + route := GetParsedRoute(inst) + endpointConfig := utils.GetEndpointConfig(server, method, route) + c.EndpointConfig = &endpointConfig } -func ContextSetWildcardEndpointsConfigs() { - if Context.WildcardEndpointsConfigs != nil { +func ContextSetWildcardEndpointsConfigs(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.WildcardEndpointsConfigs != nil { return } - if GetCurrentServer() == nil { + // Per-thread isolation via sync.Map prevents context bleeding + server := c.inst.GetCurrentServer() + if server == nil { return } - wildcardEndpointsConfigs := utils.GetWildcardEndpointsConfigs(GetCurrentServer(), GetMethod(), GetParsedRoute()) - Context.WildcardEndpointsConfigs = &wildcardEndpointsConfigs + 
wildcardEndpointsConfigs := utils.GetWildcardEndpointsConfigs(server, GetMethod(inst), GetParsedRoute(inst)) + c.WildcardEndpointsConfigs = &wildcardEndpointsConfigs } -func ContextSetIsEndpointProtectionTurnedOff() { - if Context.IsEndpointProtectionTurnedOff != nil { +func ContextSetIsEndpointProtectionTurnedOff(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.IsEndpointProtectionTurnedOff != nil { return } isEndpointProtectionTurnedOff := false - endpointConfig := GetEndpointConfig() + endpointConfig := GetEndpointConfig(inst) if endpointConfig != nil { isEndpointProtectionTurnedOff = endpointConfig.ForceProtectionOff } if !isEndpointProtectionTurnedOff { - for _, wildcardEndpointConfig := range GetWildcardEndpointsConfig() { + for _, wildcardEndpointConfig := range GetWildcardEndpointsConfig(inst) { if wildcardEndpointConfig.ForceProtectionOff { isEndpointProtectionTurnedOff = true break } } } - Context.IsEndpointProtectionTurnedOff = &isEndpointProtectionTurnedOff + c.IsEndpointProtectionTurnedOff = &isEndpointProtectionTurnedOff } -func ContextSetIsEndpointConfigured() { - if Context.IsEndpointConfigured != nil { +func ContextSetIsEndpointConfigured(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.IsEndpointConfigured != nil { return } IsEndpointConfigured := false - endpointConfig := GetEndpointConfig() + endpointConfig := GetEndpointConfig(inst) if endpointConfig != nil { IsEndpointConfigured = true } if !IsEndpointConfigured { - if len(GetWildcardEndpointsConfig()) != 0 { + if len(GetWildcardEndpointsConfig(inst)) != 0 { IsEndpointConfigured = true } } - Context.IsEndpointConfigured = &IsEndpointConfigured + c.IsEndpointConfigured = &IsEndpointConfigured } -func ContextSetIsEndpointRateLimitingEnabled() { - if Context.IsEndpointRateLimitingEnabled != nil { +func ContextSetIsEndpointRateLimitingEnabled(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.IsEndpointRateLimitingEnabled != nil { return } IsEndpointRateLimitingEnabled := false - endpointConfig := GetEndpointConfig() + endpointConfig := GetEndpointConfig(inst) if endpointConfig != nil { IsEndpointRateLimitingEnabled = endpointConfig.RateLimiting.Enabled } if !IsEndpointRateLimitingEnabled { - for _, wildcardEndpointConfig := range GetWildcardEndpointsConfig() { + for _, wildcardEndpointConfig := range GetWildcardEndpointsConfig(inst) { if wildcardEndpointConfig.RateLimiting.Enabled { IsEndpointRateLimitingEnabled = true break } } } - Context.IsEndpointRateLimitingEnabled = &IsEndpointRateLimitingEnabled + c.IsEndpointRateLimitingEnabled = &IsEndpointRateLimitingEnabled } -func ContextSetIsEndpointIpAllowed() { - if Context.IsEndpointIpAllowed != nil { +func ContextSetIsEndpointIpAllowed(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + if c.IsEndpointIpAllowed != nil { return } - ip := GetIp() + ip := GetIp(inst) isEndpointIpAllowed := utils.NoConfig - server := GetCurrentServer() - endpointConfig := GetEndpointConfig() + server := c.inst.GetCurrentServer() + endpointConfig := GetEndpointConfig(inst) if endpointConfig != nil && server != nil { - isEndpointIpAllowed = utils.IsIpAllowedOnEndpoint(server, endpointConfig.AllowedIPAddresses, ip) + isEndpointIpAllowed = utils.IsIpAllowedOnEndpoint(inst, server, endpointConfig.AllowedIPAddresses, ip) } if isEndpointIpAllowed == utils.NoConfig && server != nil { - for _, wildcardEndpointConfig := range GetWildcardEndpointsConfig() { - isEndpointIpAllowed = 
utils.IsIpAllowedOnEndpoint(server, wildcardEndpointConfig.AllowedIPAddresses, ip) + for _, wildcardEndpointConfig := range GetWildcardEndpointsConfig(inst) { + isEndpointIpAllowed = utils.IsIpAllowedOnEndpoint(inst, server, wildcardEndpointConfig.AllowedIPAddresses, ip) if isEndpointIpAllowed != utils.NoConfig { break } @@ -268,9 +302,10 @@ func ContextSetIsEndpointIpAllowed() { isEndpointIpAllowedBool := isEndpointIpAllowed != utils.NotFound - Context.IsEndpointIpAllowed = &isEndpointIpAllowedBool + c.IsEndpointIpAllowed = &isEndpointIpAllowedBool } -func ContextSetIsEndpointRateLimited() { - Context.IsEndpointRateLimited = true +func ContextSetIsEndpointRateLimited(inst *instance.RequestProcessorInstance) { + c := GetContext(inst) + c.IsEndpointRateLimited = true } diff --git a/lib/request-processor/context/context_for_unit_tests.go b/lib/request-processor/context/context_for_unit_tests.go index a8761476b..e64a054b6 100644 --- a/lib/request-processor/context/context_for_unit_tests.go +++ b/lib/request-processor/context/context_for_unit_tests.go @@ -6,12 +6,14 @@ import ( "encoding/json" "fmt" . "main/aikido_types" + "main/globals" + "main/instance" ) var TestContext map[string]string var TestServer *ServerData // Test server for unit tests -func UnitTestsCallback(context_id int) string { +func UnitTestsCallback(inst *instance.RequestProcessorInstance, context_id int) string { switch context_id { case C.CONTEXT_REMOTE_ADDRESS: return TestContext["remoteAddress"] @@ -41,19 +43,44 @@ func UnitTestsCallback(context_id int) string { return "" } -func LoadForUnitTests(context map[string]string) { - Context.Callback = UnitTestsCallback +func LoadForUnitTests(context map[string]string) *instance.RequestProcessorInstance { + tid := getThreadID() + + mockInst := instance.NewRequestProcessorInstance(false) + if TestServer != nil { + mockInst.SetCurrentServer(TestServer) + mockInst.SetCurrentToken(TestServer.AikidoConfig.Token) + } + mockInst.SetThreadID(tid) + + ctx := &RequestContextData{ + inst: mockInst, + Callback: UnitTestsCallback, + } + globals.ContextData.Store(tid, ctx) + globals.ContextInstances.Store(tid, nil) + TestContext = context + return mockInst } func UnloadForUnitTests() { - Context = RequestContextData{} - EventContext = EventContextData{} + tid := getThreadID() + globals.ContextData.Delete(tid) + globals.ContextInstances.Delete(tid) + globals.EventContextData.Delete(tid) TestServer = nil + TestContext = nil } -func SetTestServer(server *ServerData) { +func SetTestServer(inst *instance.RequestProcessorInstance, server *ServerData) { TestServer = server + + c := GetContext(inst) + if c != nil && c.inst != nil && server != nil { + c.inst.SetCurrentServer(server) + c.inst.SetCurrentToken(server.AikidoConfig.Token) + } } // GetTestServer returns the current test server, or nil if not set diff --git a/lib/request-processor/context/data_sources.go b/lib/request-processor/context/data_sources.go index 6aaae6972..49b254c9f 100644 --- a/lib/request-processor/context/data_sources.go +++ b/lib/request-processor/context/data_sources.go @@ -1,8 +1,10 @@ package context +import "main/instance" + type Source struct { Name string - CacheGet func() map[string]string + CacheGet func(*instance.RequestProcessorInstance) map[string]string } var SOURCES = []Source{ diff --git a/lib/request-processor/context/event_context.go b/lib/request-processor/context/event_context.go index 0ce9feebf..b52358f7f 100644 --- a/lib/request-processor/context/event_context.go +++ 
b/lib/request-processor/context/event_context.go @@ -3,6 +3,8 @@ package context // #include "../../API.h" import "C" import ( + "main/globals" + "main/instance" "main/utils" ) @@ -15,10 +17,22 @@ type EventContextData struct { CurrentSsrfInterceptorResult *utils.InterceptorResult } -var EventContext EventContextData +func getEventContext(inst *instance.RequestProcessorInstance) *EventContextData { + if inst == nil { + return nil + } + tid := inst.GetThreadID() + // Create new event context if it doesn't exist + newCtx := &EventContextData{} + return globals.LoadOrStoreInThreadStorage(tid, newCtx, &globals.EventContextData) +} -func ResetEventContext() bool { - EventContext = EventContextData{} +func ResetEventContext(inst *instance.RequestProcessorInstance) bool { + if inst == nil { + return false + } + tid := inst.GetThreadID() + globals.StoreInThreadStorage(tid, &EventContextData{}, &globals.EventContextData) return true } @@ -33,10 +47,17 @@ A partial interceptor result stores the payload that matched the user input, the PHP function that was called, ..., basically the data needed for reporting if this actually turns into a detection at a later stage. */ -func EventContextSetCurrentSsrfInterceptorResult(interceptorResult *utils.InterceptorResult) { - EventContext.CurrentSsrfInterceptorResult = interceptorResult +func EventContextSetCurrentSsrfInterceptorResult(inst *instance.RequestProcessorInstance, interceptorResult *utils.InterceptorResult) { + ctx := getEventContext(inst) + if ctx != nil { + ctx.CurrentSsrfInterceptorResult = interceptorResult + } } -func GetCurrentSsrfInterceptorResult() *utils.InterceptorResult { - return EventContext.CurrentSsrfInterceptorResult +func GetCurrentSsrfInterceptorResult(inst *instance.RequestProcessorInstance) *utils.InterceptorResult { + ctx := getEventContext(inst) + if ctx == nil { + return nil + } + return ctx.CurrentSsrfInterceptorResult } diff --git a/lib/request-processor/context/event_getters.go b/lib/request-processor/context/event_getters.go index 5209c6dea..5dca1a64e 100644 --- a/lib/request-processor/context/event_getters.go +++ b/lib/request-processor/context/event_getters.go @@ -4,55 +4,57 @@ package context import "C" import ( "main/helpers" + "main/instance" "net/url" ) -func GetOutgoingRequestHostnameAndPort() (string, uint32) { - return getHostNameAndPort(C.OUTGOING_REQUEST_URL, C.OUTGOING_REQUEST_PORT) +func GetOutgoingRequestHostnameAndPort(inst *instance.RequestProcessorInstance) (string, uint32) { + return getHostNameAndPort(inst, C.OUTGOING_REQUEST_URL, C.OUTGOING_REQUEST_PORT) } -func GetOutgoingRequestEffectiveHostnameAndPort() (string, uint32) { - return getHostNameAndPort(C.OUTGOING_REQUEST_EFFECTIVE_URL, C.OUTGOING_REQUEST_EFFECTIVE_URL_PORT) +func GetOutgoingRequestEffectiveHostnameAndPort(inst *instance.RequestProcessorInstance) (string, uint32) { + return getHostNameAndPort(inst, C.OUTGOING_REQUEST_EFFECTIVE_URL, C.OUTGOING_REQUEST_EFFECTIVE_URL_PORT) } -func GetOutgoingRequestResolvedIp() string { - return Context.Callback(C.OUTGOING_REQUEST_RESOLVED_IP) +func GetOutgoingRequestResolvedIp(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.OUTGOING_REQUEST_RESOLVED_IP) } -func GetFunctionName() string { - return Context.Callback(C.FUNCTION_NAME) +func GetFunctionName(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.FUNCTION_NAME) } -func GetCmd() string { - return Context.Callback(C.CMD) +func GetCmd(inst 
*instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.CMD) } -func GetFilename() string { - return Context.Callback(C.FILENAME) +func GetFilename(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.FILENAME) } -func GetFilename2() string { - return Context.Callback(C.FILENAME2) +func GetFilename2(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.FILENAME2) } -func GetSqlQuery() string { - return Context.Callback(C.SQL_QUERY) +func GetSqlQuery(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.SQL_QUERY) } -func GetSqlDialect() string { - return Context.Callback(C.SQL_DIALECT) +func GetSqlDialect(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.SQL_DIALECT) } -func GetModule() string { - return Context.Callback(C.MODULE) +func GetModule(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.MODULE) } -func GetStackTrace() string { - return Context.Callback(C.STACK_TRACE) +func GetStackTrace(inst *instance.RequestProcessorInstance) string { + return GetContext(inst).Callback(inst, C.STACK_TRACE) } -func getHostNameAndPort(urlCallbackId int, portCallbackId int) (string, uint32) { // urlcallbackid is the type of data we request, eg C.OUTGOING_REQUEST_URL - urlStr := Context.Callback(urlCallbackId) +func getHostNameAndPort(inst *instance.RequestProcessorInstance, urlCallbackId int, portCallbackId int) (string, uint32) { + ctx := GetContext(inst) + urlStr := ctx.Callback(inst, urlCallbackId) urlParsed, err := url.Parse(urlStr) if err != nil { return "", 0 @@ -60,7 +62,7 @@ func getHostNameAndPort(urlCallbackId int, portCallbackId int) (string, uint32) hostname := urlParsed.Hostname() portFromURL := helpers.GetPortFromURL(urlParsed) - portStr := Context.Callback(portCallbackId) + portStr := ctx.Callback(inst, portCallbackId) port := helpers.ParsePort(portStr) if port == 0 { port = portFromURL diff --git a/lib/request-processor/context/request_context.go b/lib/request-processor/context/request_context.go index 81b237421..f2a70968f 100644 --- a/lib/request-processor/context/request_context.go +++ b/lib/request-processor/context/request_context.go @@ -1,19 +1,23 @@ package context // #include "../../API.h" +// #include +// static unsigned long get_thread_id() { return (unsigned long)pthread_self(); } import "C" import ( . 
"main/aikido_types" "main/globals" + "main/instance" "main/log" "unsafe" ) -type CallbackFunction func(int) string +type CallbackFunction func(*instance.RequestProcessorInstance, int) string /* Request level context cache (changes on each PHP request) */ type RequestContextData struct { - Callback CallbackFunction // callback to access data from the PHP layer (C++ extension) about the current request and current event + inst *instance.RequestProcessorInstance // CACHED: Instance pointer for fast access + Callback CallbackFunction // callback to access data from the PHP layer (C++ extension) about the current request and current event Method *string Route *string RouteParsed *string @@ -46,197 +50,203 @@ type RequestContextData struct { RouteParamsParsedFlattened *map[string]string } -var Context RequestContextData -var contextInstance unsafe.Pointer - -type requestProcessorInstance struct { - CurrentToken string - CurrentServer *ServerData +func GetServerPID() int32 { + return globals.EnvironmentConfig.ServerPID } -func GetCurrentServer() (result *ServerData) { - defer func() { - if r := recover(); r != nil { - result = nil - } - }() +func Init(instPtr unsafe.Pointer, callback CallbackFunction) bool { + tid := getThreadID() - if contextInstance == nil { - if TestServer != nil { - return GetTestServer() - } - return nil + inst := instance.GetInstance(instPtr) + if inst != nil { + inst.SetThreadID(tid) } - instPtr := (*requestProcessorInstance)(contextInstance) - if instPtr == nil { - if TestServer != nil { - return GetTestServer() - } - return nil - } - result = instPtr.CurrentServer - return result -} + globals.ContextInstances.Store(tid, instPtr) -func GetCurrentServerToken() string { - if contextInstance == nil { - return "" + ctx := &RequestContextData{ + inst: inst, // Store instance in context for fast access + Callback: callback, } + globals.ContextData.Store(tid, ctx) - instPtr := (*requestProcessorInstance)(contextInstance) - if instPtr == nil { - return "" - } - return instPtr.CurrentToken + return true } -func GetServerPID() int32 { - return globals.EnvironmentConfig.ServerPID +func GetContext(inst *instance.RequestProcessorInstance) *RequestContextData { + tid := inst.GetThreadID() + return globals.GetFromThreadStorage[*RequestContextData](tid, &globals.ContextData) } -func GetInstancePtr() unsafe.Pointer { - return contextInstance +func (ctx *RequestContextData) GetInstance() *instance.RequestProcessorInstance { + return ctx.inst } -func Init(instPtr unsafe.Pointer, callback CallbackFunction) bool { - contextInstance = instPtr - Context = RequestContextData{ - Callback: callback, - } - return true +// getThreadID is only called once during Init to bootstrap the threadID cache in inst +func getThreadID() uint64 { + return uint64(C.get_thread_id()) } -func Clear() bool { - Context = RequestContextData{ - Callback: Context.Callback, +func Clear(inst *instance.RequestProcessorInstance) bool { + ctx := GetContext(inst) + *ctx = RequestContextData{ + inst: inst, + Callback: ctx.Callback, } - ResetEventContext() + ResetEventContext(inst) return true } -func GetFromCache[T any](fetchDataFn func(), s **T) T { +func GetFromCache[T any](inst *instance.RequestProcessorInstance, fetchDataFn func(*instance.RequestProcessorInstance), s **T) T { if fetchDataFn != nil { - fetchDataFn() + fetchDataFn(inst) } if *s == nil { var t T - if GetCurrentServer() != nil { - log.Warnf("Error getting from cache. 
Returning default value %v...", t) + c := GetContext(inst) + if c != nil && c.inst != nil && inst.GetCurrentServer() != nil { + log.Warnf(inst, "Error getting from cache. Returning default value %v...", t) } return t } return **s } -func GetIp() string { - return GetFromCache(ContextSetIp, &Context.IP) +func GetMethod(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetMethod, &ctx.Method) } -func GetMethod() string { - return GetFromCache(ContextSetMethod, &Context.Method) +func GetRoute(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetRoute, &ctx.Route) } -func GetRoute() string { - return GetFromCache(ContextSetRoute, &Context.Route) +func GetIp(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetIp, &ctx.IP) } -func GetParsedRoute() string { - return GetFromCache(ContextSetParsedRoute, &Context.RouteParsed) +func GetUserId(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetUserId, &ctx.UserId) } -func GetUrl() string { - return GetFromCache(ContextSetUrl, &Context.URL) +func GetUserAgent(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetUserAgent, &ctx.UserAgent) } -func GetStatusCode() int { - return GetFromCache(ContextSetStatusCode, &Context.StatusCode) +func GetStatusCode(inst *instance.RequestProcessorInstance) int { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetStatusCode, &ctx.StatusCode) } -func IsIpBypassed() bool { - return GetFromCache(ContextSetIsIpBypassed, &Context.IsIpBypassed) +func GetUrl(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetUrl, &ctx.URL) } -func GetBodyRaw() string { - return GetFromCache(ContextSetBody, &Context.BodyRaw) +func GetBodyRaw(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetBody, &ctx.BodyRaw) } -func GetBodyParsed() map[string]interface{} { - return GetFromCache(ContextSetBody, &Context.BodyParsed) +func GetParsedRoute(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetParsedRoute, &ctx.RouteParsed) } -func GetQueryParsed() map[string]interface{} { - return GetFromCache(ContextSetQuery, &Context.QueryParsed) +func GetRateLimitGroup(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetRateLimitGroup, &ctx.RateLimitGroup) } -func GetCookiesParsed() map[string]interface{} { - return GetFromCache(ContextSetCookies, &Context.CookiesParsed) +func GetQueryParsed(inst *instance.RequestProcessorInstance) map[string]interface{} { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetQuery, &ctx.QueryParsed) } -func GetHeadersParsed() map[string]interface{} { - return GetFromCache(ContextSetHeaders, &Context.HeadersParsed) +func GetHeadersParsed(inst *instance.RequestProcessorInstance) map[string]interface{} { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetHeaders, &ctx.HeadersParsed) } -func GetBodyParsedFlattened() map[string]string { - return GetFromCache(ContextSetBody, &Context.BodyParsedFlattened) +func IsIpBypassed(inst *instance.RequestProcessorInstance) bool { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetIsIpBypassed, 
&ctx.IsIpBypassed) } -func GetQueryParsedFlattened() map[string]string { - return GetFromCache(ContextSetQuery, &Context.QueryParsedFlattened) +func IsEndpointRateLimited(inst *instance.RequestProcessorInstance) bool { + return GetContext(inst).IsEndpointRateLimited } -func GetCookiesParsedFlattened() map[string]string { - return GetFromCache(ContextSetCookies, &Context.CookiesParsedFlattened) +func IsEndpointProtectionTurnedOff(inst *instance.RequestProcessorInstance) bool { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetIsEndpointProtectionTurnedOff, &ctx.IsEndpointProtectionTurnedOff) } -func GetRouteParamsParsedFlattened() map[string]string { - return GetFromCache(ContextSetRouteParams, &Context.RouteParamsParsedFlattened) +func GetBodyParsed(inst *instance.RequestProcessorInstance) map[string]interface{} { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetBody, &ctx.BodyParsed) } -func GetHeadersParsedFlattened() map[string]string { - return GetFromCache(ContextSetHeaders, &Context.HeadersParsedFlattened) +func GetCookiesParsed(inst *instance.RequestProcessorInstance) map[string]interface{} { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetCookies, &ctx.CookiesParsed) } -func GetUserAgent() string { - return GetFromCache(ContextSetUserAgent, &Context.UserAgent) +func GetBodyParsedFlattened(inst *instance.RequestProcessorInstance) map[string]string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetBody, &ctx.BodyParsedFlattened) } -func GetUserId() string { - return GetFromCache(ContextSetUserId, &Context.UserId) +func GetQueryParsedFlattened(inst *instance.RequestProcessorInstance) map[string]string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetQuery, &ctx.QueryParsedFlattened) } -func GetUserName() string { - return GetFromCache(ContextSetUserName, &Context.UserName) +func GetCookiesParsedFlattened(inst *instance.RequestProcessorInstance) map[string]string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetCookies, &ctx.CookiesParsedFlattened) } -func GetRateLimitGroup() string { - return GetFromCache(ContextSetRateLimitGroup, &Context.RateLimitGroup) +func GetRouteParamsParsedFlattened(inst *instance.RequestProcessorInstance) map[string]string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetRouteParams, &ctx.RouteParamsParsedFlattened) } -func GetEndpointConfig() *EndpointData { - return GetFromCache(ContextSetEndpointConfig, &Context.EndpointConfig) +func GetHeadersParsedFlattened(inst *instance.RequestProcessorInstance) map[string]string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetHeaders, &ctx.HeadersParsedFlattened) } -func GetWildcardEndpointsConfig() []EndpointData { - return GetFromCache(ContextSetWildcardEndpointsConfigs, &Context.WildcardEndpointsConfigs) +func GetUserName(inst *instance.RequestProcessorInstance) string { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetUserName, &ctx.UserName) } -func IsEndpointConfigured() bool { - return GetFromCache(ContextSetIsEndpointConfigured, &Context.IsEndpointConfigured) +func GetEndpointConfig(inst *instance.RequestProcessorInstance) *EndpointData { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetEndpointConfig, &ctx.EndpointConfig) } -func IsEndpointRateLimitingEnabled() bool { - return GetFromCache(ContextSetIsEndpointRateLimitingEnabled, &Context.IsEndpointRateLimitingEnabled) +func GetWildcardEndpointsConfig(inst *instance.RequestProcessorInstance) 
[]EndpointData { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetWildcardEndpointsConfigs, &ctx.WildcardEndpointsConfigs) } -func IsEndpointIpAllowed() bool { - return GetFromCache(ContextSetIsEndpointIpAllowed, &Context.IsEndpointIpAllowed) +func IsEndpointConfigured(inst *instance.RequestProcessorInstance) bool { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetIsEndpointConfigured, &ctx.IsEndpointConfigured) } -func IsEndpointProtectionTurnedOff() bool { - return GetFromCache(ContextSetIsEndpointProtectionTurnedOff, &Context.IsEndpointProtectionTurnedOff) +func IsEndpointRateLimitingEnabled(inst *instance.RequestProcessorInstance) bool { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetIsEndpointRateLimitingEnabled, &ctx.IsEndpointRateLimitingEnabled) } -func IsEndpointRateLimited() bool { - return Context.IsEndpointRateLimited +func IsEndpointIpAllowed(inst *instance.RequestProcessorInstance) bool { + ctx := GetContext(inst) + return GetFromCache(inst, ContextSetIsEndpointIpAllowed, &ctx.IsEndpointIpAllowed) } diff --git a/lib/request-processor/globals/globals.go b/lib/request-processor/globals/globals.go index 5ebf003eb..bb7645bce 100644 --- a/lib/request-processor/globals/globals.go +++ b/lib/request-processor/globals/globals.go @@ -1,14 +1,55 @@ package globals import ( - . "main/aikido_types" + "log" + "os" "sync" + + . "main/aikido_types" ) +// =========================== +// Server Configuration +// =========================== + var EnvironmentConfig EnvironmentConfigData var Servers = make(map[string]*ServerData) var ServersMutex sync.RWMutex +// =========================== +// Per-Thread Context Storage +// =========================== +// Thread-safe per-thread context storage for ZTS (Zend Thread Safety) +// Using pthread ID as key ensures each OS thread has isolated context + +var ( + ContextInstances sync.Map // map[uint64]unsafe.Pointer - pthread ID -> instance pointer + ContextData sync.Map // map[uint64]*RequestContextData - pthread ID -> request context + EventContextData sync.Map // map[uint64]*EventContextData - pthread ID -> event context +) + +// =========================== +// Logging State +// =========================== + +type LogLevel int + +const ( + LogDebugLevel LogLevel = iota + LogInfoLevel + LogWarnLevel + LogErrorLevel +) + +var ( + CurrentLogLevel = LogErrorLevel + Logger = log.New(os.Stdout, "", 0) + CliLogging = true + LogFilePath = "" + LogMutex sync.RWMutex + LogFile *os.File +) + func NewServerData() *ServerData { return &ServerData{ AikidoConfig: AikidoConfigData{}, @@ -57,3 +98,32 @@ const ( Version = "1.4.8" SocketPath = "/run/aikido-" + Version + "/aikido-agent.sock" ) + +func GetFromThreadStorage[T any](threadID uint64, storage *sync.Map) T { + if val, ok := storage.Load(threadID); ok { + return val.(T) + } + var zero T + return zero +} + +func StoreInThreadStorage(threadID uint64, data interface{}, storage *sync.Map) { + storage.Store(threadID, data) +} + +func LoadOrStoreInThreadStorage[T any](threadID uint64, newData T, storage *sync.Map) T { + if val, ok := storage.Load(threadID); ok { + return val.(T) + } + storage.Store(threadID, newData) + return newData +} + +func DeleteFromThreadStorage(threadID uint64, storage *sync.Map) { + storage.Delete(threadID) +} + +func HasInThreadStorage(threadID uint64, storage *sync.Map) bool { + _, ok := storage.Load(threadID) + return ok +} diff --git a/lib/request-processor/grpc/client.go b/lib/request-processor/grpc/client.go index e1ee38afc..b0507eba7 100644 
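
The globals.go hunk just above replaces the single package-level request context with three sync.Map tables keyed by the pthread ID, plus small generic helpers, so that each FrankenPHP worker thread sees only its own context under ZTS. A self-contained sketch of how those helpers behave (the fixed tid below is an assumption standing in for the pthread_self()-derived ID obtained via cgo in request_context.go) might look like:

// Standalone illustration of the per-thread storage pattern added to globals.go.
// contextData and requestContext are local stand-ins, not the extension's types.
package main

import (
	"fmt"
	"sync"
)

type requestContext struct{ route string }

var contextData sync.Map // threadID (uint64) -> *requestContext

func getFromThreadStorage[T any](threadID uint64, storage *sync.Map) T {
	if val, ok := storage.Load(threadID); ok {
		return val.(T)
	}
	var zero T
	return zero // e.g. a nil *requestContext when the thread has no entry yet
}

func loadOrStoreInThreadStorage[T any](threadID uint64, newData T, storage *sync.Map) T {
	if val, ok := storage.Load(threadID); ok {
		return val.(T)
	}
	storage.Store(threadID, newData)
	return newData
}

func main() {
	const tid = uint64(42) // assumption: stands in for the current worker's pthread ID
	loadOrStoreInThreadStorage(tid, &requestContext{route: "/login"}, &contextData)
	ctx := getFromThreadStorage[*requestContext](tid, &contextData)
	fmt.Println(ctx.route) // "/login"; other thread IDs get their own isolated entry
}
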
--- a/lib/request-processor/grpc/client.go +++ b/lib/request-processor/grpc/client.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "main/globals" + "main/instance" "main/log" "main/utils" "time" @@ -31,7 +32,7 @@ func Init() { client = protos.NewAikidoClient(conn) - log.Debugf("Current connection state: %s\n", conn.GetState().String()) + log.Debugf(nil, "Current connection state: %s\n", conn.GetState().String()) } func Uninit() { @@ -65,15 +66,15 @@ func SendAikidoConfig(server *ServerData) { RequestProcessorPid: globals.EnvironmentConfig.RequestProcessorPID, }) if err != nil { - log.Warnf("Could not send Aikido Config: %v", err) + log.Warnf(nil, "Could not send Aikido Config: %v", err) return } - log.Debugf("Aikido config sent via socket!") + log.Debugf(nil, "Aikido config sent via socket!") } /* Send outgoing domain to Aikido Agent via gRPC */ -func OnDomain(server *ServerData, domain string, port uint32) { +func OnDomain(threadID uint64, server *ServerData, domain string, port uint32) { if client == nil { return } @@ -83,11 +84,11 @@ func OnDomain(server *ServerData, domain string, port uint32) { _, err := client.OnDomain(ctx, &protos.Domain{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Domain: domain, Port: port}) if err != nil { - log.Warnf("Could not send domain %v: %v", domain, err) + log.WarnfWithThreadID(threadID, "Could not send domain %v: %v", domain, err) return } - log.Debugf("Domain sent via socket: %v:%v", domain, port) + log.DebugfWithThreadID(threadID, "Domain sent via socket: %v:%v", domain, port) } /* Send packages to Aikido Agent via gRPC */ @@ -101,15 +102,15 @@ func OnPackages(server *ServerData, packages map[string]string) { _, err := client.OnPackages(ctx, &protos.Packages{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Packages: packages}) if err != nil { - log.Warnf("Could not send packages %v: %v", packages, err) + log.Warnf(nil, "Could not send packages %v: %v", packages, err) return } - log.Debugf("Packages sent via socket!") + log.Debugf(nil, "Packages sent via socket!") } /* Send request metadata (route & method) to Aikido Agent via gRPC */ -func GetRateLimitingStatus(server *ServerData, method string, route string, routeParsed string, user string, ip string, rateLimitGroup string, timeout time.Duration) *protos.RateLimitingStatus { +func GetRateLimitingStatus(inst *instance.RequestProcessorInstance, server *ServerData, method string, route string, routeParsed string, user string, ip string, rateLimitGroup string, timeout time.Duration) *protos.RateLimitingStatus { if client == nil || server == nil { return nil } @@ -119,11 +120,11 @@ func GetRateLimitingStatus(server *ServerData, method string, route string, rout RateLimitingStatus, err := client.GetRateLimitingStatus(ctx, &protos.RateLimitingInfo{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Method: method, Route: route, RouteParsed: routeParsed, User: user, Ip: ip, RateLimitGroup: rateLimitGroup}) if err != nil { - log.Warnf("Cannot get rate limiting status %v %v: %v", method, route, err) + log.Warnf(inst, "Cannot get rate limiting status %v %v: %v", method, route, err) return nil } - log.Debugf("Rate limiting status for (%v %v) sent via socket and got reply (%v)", method, route, RateLimitingStatus) + log.Debugf(inst, "Rate limiting status for (%v %v) sent via socket and got reply (%v)", method, route, RateLimitingStatus) return RateLimitingStatus } @@ -137,7 +138,7 @@ func OnRequestShutdown(params 
RequestShutdownParams) { defer cancel() _, err := client.OnRequestShutdown(ctx, &protos.RequestMetadataShutdown{ - Token: params.Server.AikidoConfig.Token, + Token: params.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Method: params.Method, Route: params.Route, @@ -153,11 +154,11 @@ func OnRequestShutdown(params RequestShutdownParams) { ShouldDiscoverRoute: params.ShouldDiscoverRoute, }) if err != nil { - log.Warnf("Could not send request metadata %v %v %v: %v", params.Method, params.Route, params.StatusCode, err) + log.Warnf(nil, "Could not send request metadata %v %v %v: %v", params.Method, params.Route, params.StatusCode, err) return } - log.Debugf("Request metadata sent via socket (%v %v %v)", params.Method, params.Route, params.StatusCode) + log.Debugf(nil, "Request metadata sent via socket (%v %v %v)", params.Method, params.Route, params.StatusCode) } /* Get latest cloud config from Aikido Agent via gRPC */ @@ -169,13 +170,25 @@ func GetCloudConfig(server *ServerData, timeout time.Duration) { ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() - cloudConfig, err := client.GetCloudConfig(ctx, &protos.CloudConfigUpdatedAt{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, ConfigUpdatedAt: utils.GetCloudConfigUpdatedAt(server)}) + cloudConfig, err := client.GetCloudConfig(ctx, &protos.CloudConfigUpdatedAt{ + Token: server.AikidoConfig.Token, + ServerPid: globals.EnvironmentConfig.ServerPID, + ConfigUpdatedAt: utils.GetCloudConfigUpdatedAt(server), + }) + if err != nil { - log.Debugf("Could not get cloud config for server \"AIK_RUNTIME_***%s\": %v", utils.AnonymizeToken(server.AikidoConfig.Token), err) + log.Debugf(nil, "Could not get cloud config for server \"AIK_RUNTIME_***%s\": %v", utils.AnonymizeToken(server.AikidoConfig.Token), err) + return + } + + if cloudConfig == nil { + log.Debugf(nil, "Cloud config not updated for server \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(server.AikidoConfig.Token)) return } - log.Debugf("Got cloud config for server \"AIK_RUNTIME_***%s\"!", utils.AnonymizeToken(server.AikidoConfig.Token)) + fmt.Printf("[GetCloudConfig] Successfully received cloud config for token \"AIK_RUNTIME_***%s\", ConfigUpdatedAt=%d, endpoints=%d\n", + utils.AnonymizeToken(server.AikidoConfig.Token), cloudConfig.ConfigUpdatedAt, len(cloudConfig.Endpoints)) + log.Debugf(nil, "Got cloud config for server \"AIK_RUNTIME_***%s\"!", utils.AnonymizeToken(server.AikidoConfig.Token)) setCloudConfig(server, cloudConfig) } @@ -185,7 +198,7 @@ func GetCloudConfigForAllServers(timeout time.Duration) { } } -func OnUserEvent(server *ServerData, id string, username string, ip string) { +func OnUserEvent(threadID uint64, server *ServerData, id string, username string, ip string) { if client == nil { return } @@ -195,14 +208,14 @@ func OnUserEvent(server *ServerData, id string, username string, ip string) { _, err := client.OnUser(ctx, &protos.User{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Id: id, Username: username, Ip: ip}) if err != nil { - log.Warnf("Could not send user event %v %v %v: %v", id, username, ip, err) + log.WarnfWithThreadID(threadID, "Could not send user event %v %v %v: %v", id, username, ip, err) return } - log.Debugf("User event sent via socket (%v %v %v)", id, username, ip) + log.DebugfWithThreadID(threadID, "User event sent via socket (%v %v %v)", id, username, ip) } -func OnAttackDetected(attackDetected *protos.AttackDetected) { +func OnAttackDetected(inst 
*instance.RequestProcessorInstance, attackDetected *protos.AttackDetected) { if client == nil { return } @@ -212,13 +225,13 @@ func OnAttackDetected(attackDetected *protos.AttackDetected) { _, err := client.OnAttackDetected(ctx, attackDetected) if err != nil { - log.Warnf("Could not send attack detected event") + log.Warnf(inst, "Could not send attack detected event") return } - log.Debugf("Attack detected event sent via socket") + log.Debugf(inst, "Attack detected event sent via socket") } -func OnMonitoredSinkStats(server *ServerData, sink, kind string, attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total int32, timings []int64) { +func OnMonitoredSinkStats(threadID uint64, server *ServerData, sink, kind string, attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total int32, timings []int64) { if client == nil || server == nil { return } @@ -239,13 +252,13 @@ func OnMonitoredSinkStats(server *ServerData, sink, kind string, attacksDetected Timings: timings, }) if err != nil { - log.Warnf("Could not send monitored sink stats event") + log.WarnfWithThreadID(threadID, "Could not send monitored sink stats event") return } - log.Debugf("Monitored sink stats for sink \"%s\" sent via socket", sink) + log.DebugfWithThreadID(threadID, "Monitored sink stats for sink \"%s\" sent via socket", sink) } -func OnMiddlewareInstalled(server *ServerData) { +func OnMiddlewareInstalled(threadID uint64, server *ServerData) { if client == nil || server == nil { return } @@ -255,13 +268,13 @@ func OnMiddlewareInstalled(server *ServerData) { _, err := client.OnMiddlewareInstalled(ctx, &protos.MiddlewareInstalledInfo{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID}) if err != nil { - log.Warnf("Could not call OnMiddlewareInstalled") + log.WarnfWithThreadID(threadID, "Could not call OnMiddlewareInstalled") return } - log.Debugf("OnMiddlewareInstalled sent via socket") + log.DebugfWithThreadID(threadID, "OnMiddlewareInstalled sent via socket") } -func OnMonitoredIpMatch(server *ServerData, lists []utils.IpListMatch) { +func OnMonitoredIpMatch(threadID uint64, server *ServerData, lists []utils.IpListMatch) { if client == nil || len(lists) == 0 { return } @@ -276,13 +289,13 @@ func OnMonitoredIpMatch(server *ServerData, lists []utils.IpListMatch) { _, err := client.OnMonitoredIpMatch(ctx, &protos.MonitoredIpMatch{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Lists: protosLists}) if err != nil { - log.Warnf("Could not call OnMonitoredIpMatch") + log.WarnfWithThreadID(threadID, "Could not call OnMonitoredIpMatch") return } - log.Debugf("OnMonitoredIpMatch sent via socket") + log.DebugfWithThreadID(threadID, "OnMonitoredIpMatch sent via socket") } -func OnMonitoredUserAgentMatch(server *ServerData, lists []string) { +func OnMonitoredUserAgentMatch(threadID uint64, server *ServerData, lists []string) { if client == nil || len(lists) == 0 { return } @@ -292,8 +305,8 @@ func OnMonitoredUserAgentMatch(server *ServerData, lists []string) { _, err := client.OnMonitoredUserAgentMatch(ctx, &protos.MonitoredUserAgentMatch{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Lists: lists}) if err != nil { - log.Warnf("Could not call OnMonitoredUserAgentMatch") + log.WarnfWithThreadID(threadID, "Could not call OnMonitoredUserAgentMatch") return } - log.Debugf("OnMonitoredUserAgentMatch sent via socket") + log.DebugfWithThreadID(threadID, "OnMonitoredUserAgentMatch sent via socket") } diff 
--git a/lib/request-processor/grpc/config.go b/lib/request-processor/grpc/config.go index 3af692fe2..12532e3be 100644 --- a/lib/request-processor/grpc/config.go +++ b/lib/request-processor/grpc/config.go @@ -24,7 +24,7 @@ func buildIpList(cloudIpList map[string]*protos.IpList) map[string]IpList { for ipListKey, protoIpList := range cloudIpList { ipSet, err := utils.BuildIpList(protoIpList.Description, protoIpList.Ips) if err != nil { - log.Errorf("Error building IP list: %s\n", err) + log.Errorf(nil, "Error building IP list: %s\n", err) continue } ipList[ipListKey] = *ipSet @@ -35,7 +35,7 @@ func buildIpList(cloudIpList map[string]*protos.IpList) map[string]IpList { func getEndpointData(ep *protos.Endpoint) EndpointData { allowedIPSet, err := utils.BuildIpSet(ep.AllowedIPAddresses) if err != nil { - log.Errorf("Error building allowed IP set: %s\n", err) + log.Errorf(nil, "Error building allowed IP set: %s\n", err) } endpointData := EndpointData{ ForceProtectionOff: ep.ForceProtectionOff, @@ -71,7 +71,7 @@ func buildUserAgentsRegexpFromProto(userAgents string) *regexp.Regexp { } userAgentsRegexp, err := regexp.Compile("(?i)" + userAgents) if err != nil { - log.Errorf("Error compiling user agents regex: %s\n", err) + log.Errorf(nil, "Error compiling user agents regex: %s\n", err) return nil } return userAgentsRegexp @@ -114,7 +114,7 @@ func setCloudConfig(server *ServerData, cloudConfigFromAgent *protos.CloudConfig bypassedIPSet, bypassedIPSetErr := utils.BuildIpSet(cloudConfigFromAgent.BypassedIps) server.CloudConfig.BypassedIps = bypassedIPSet if bypassedIPSet == nil { - log.Errorf("Error building bypassed IP set: %s\n", bypassedIPSetErr) + log.Errorf(nil, "Error building bypassed IP set: %s\n", bypassedIPSetErr) } if cloudConfigFromAgent.Block { diff --git a/lib/request-processor/handle_blocking_request.go b/lib/request-processor/handle_blocking_request.go index f0b3524c6..f6d5997b9 100644 --- a/lib/request-processor/handle_blocking_request.go +++ b/lib/request-processor/handle_blocking_request.go @@ -30,44 +30,45 @@ func GetAction(actionHandling, actionType, trigger, description, data string, re } func OnGetBlockingStatus(inst *instance.RequestProcessorInstance) string { - log.Debugf("OnGetBlockingStatus called!") + log.Debugf(inst, "OnGetBlockingStatus called!") server := inst.GetCurrentServer() if server == nil { return "" } if !server.MiddlewareInstalled { - go grpc.OnMiddlewareInstalled(server) + threadID := inst.GetThreadID() + go grpc.OnMiddlewareInstalled(threadID, server) server.MiddlewareInstalled = true } - userId := context.GetUserId() + userId := context.GetUserId(inst) if utils.IsUserBlocked(server, userId) { - log.Infof("User \"%s\" is blocked!", userId) + log.Infof(inst, "User \"%s\" is blocked!", userId) return GetAction("store", "blocked", "user", "user blocked from config", userId, 403) } autoBlockingStatus := OnGetAutoBlockingStatus(inst) - if context.IsIpBypassed() { + if context.IsIpBypassed(inst) { return "" } - if context.IsEndpointRateLimitingEnabled() { + if context.IsEndpointRateLimitingEnabled(inst) { // If request is monitored for rate limiting, // do a sync call via gRPC to see if the request should be blocked or not - method := context.GetMethod() - route := context.GetRoute() - ip := context.GetIp() - rateLimitGroup := context.GetRateLimitGroup() - routeParsed := context.GetParsedRoute() + method := context.GetMethod(inst) + route := context.GetRoute(inst) + ip := context.GetIp(inst) + rateLimitGroup := context.GetRateLimitGroup(inst) + routeParsed := 
context.GetParsedRoute(inst) if method == "" || route == "" { return "" } - rateLimitingStatus := grpc.GetRateLimitingStatus(server, method, route, routeParsed, userId, ip, rateLimitGroup, 10*time.Millisecond) + rateLimitingStatus := grpc.GetRateLimitingStatus(inst, server, method, route, routeParsed, userId, ip, rateLimitGroup, 10*time.Millisecond) if rateLimitingStatus != nil && rateLimitingStatus.Block { - context.ContextSetIsEndpointRateLimited() - log.Infof("Request made from IP \"%s\" is ratelimited by \"%s\"!", ip, rateLimitingStatus.Trigger) + context.ContextSetIsEndpointRateLimited(inst) + log.Infof(inst, "Request made from IP \"%s\" is ratelimited by \"%s\"!", ip, rateLimitingStatus.Trigger) return GetAction("store", "ratelimited", rateLimitingStatus.Trigger, "configured rate limit exceeded by current ip", ip, 429) } } @@ -76,55 +77,57 @@ func OnGetBlockingStatus(inst *instance.RequestProcessorInstance) string { } func OnGetAutoBlockingStatus(inst *instance.RequestProcessorInstance) string { - log.Debugf("OnGetAutoBlockingStatus called!") + log.Debugf(inst, "OnGetAutoBlockingStatus called!") server := inst.GetCurrentServer() if server == nil { return "" } - method := context.GetMethod() - route := context.GetParsedRoute() + method := context.GetMethod(inst) + route := context.GetParsedRoute(inst) if method == "" || route == "" { return "" } - ip := context.GetIp() - userAgent := context.GetUserAgent() + ip := context.GetIp(inst) + userAgent := context.GetUserAgent(inst) - if !context.IsEndpointIpAllowed() { - log.Infof("IP \"%s\" is not allowed to access this endpoint!", ip) + if !context.IsEndpointIpAllowed(inst) { + log.Infof(inst, "IP \"%s\" is not allowed to access this endpoint!", ip) return GetAction("exit", "blocked", "ip", "not allowed by config to access this endpoint", ip, 403) } - if context.IsIpBypassed() { - log.Infof("IP \"%s\" is bypassed! Skipping additional checks...", ip) + if context.IsIpBypassed(inst) { + log.Infof(inst, "IP \"%s\" is bypassed! 
Skipping additional checks...", ip) return "" } - if !utils.IsIpAllowed(server, ip) { - log.Infof("IP \"%s\" is not found in allow lists!", ip) + if !utils.IsIpAllowed(inst, server, ip) { + log.Infof(inst, "IP \"%s\" is not found in allow lists!", ip) return GetAction("exit", "blocked", "ip", "not in allow lists", ip, 403) } - if ipMonitored, ipMonitoredMatches := utils.IsIpMonitored(server, ip); ipMonitored { - log.Infof("IP \"%s\" found in monitored lists: %v!", ip, ipMonitoredMatches) - go grpc.OnMonitoredIpMatch(server, ipMonitoredMatches) + threadID := inst.GetThreadID() + + if ipMonitored, ipMonitoredMatches := utils.IsIpMonitored(inst, server, ip); ipMonitored { + log.Infof(inst, "IP \"%s\" found in monitored lists: %v!", ip, ipMonitoredMatches) + go grpc.OnMonitoredIpMatch(threadID, server, ipMonitoredMatches) } - if ipBlocked, ipBlockedMatches := utils.IsIpBlocked(server, ip); ipBlocked { - log.Infof("IP \"%s\" found in blocked lists: %v!", ip, ipBlockedMatches) - go grpc.OnMonitoredIpMatch(server, ipBlockedMatches) + if ipBlocked, ipBlockedMatches := utils.IsIpBlocked(inst, server, ip); ipBlocked { + log.Infof(inst, "IP \"%s\" found in blocked lists: %v!", ip, ipBlockedMatches) + go grpc.OnMonitoredIpMatch(threadID, server, ipBlockedMatches) return GetAction("exit", "blocked", "ip", ipBlockedMatches[0].Description, ip, 403) } if userAgentMonitored, userAgentMonitoredDescriptions := utils.IsUserAgentMonitored(server, userAgent); userAgentMonitored { - log.Infof("User Agent \"%s\" found in monitored lists: %v!", userAgent, userAgentMonitoredDescriptions) - go grpc.OnMonitoredUserAgentMatch(server, userAgentMonitoredDescriptions) + log.Infof(inst, "User Agent \"%s\" found in monitored lists: %v!", userAgent, userAgentMonitoredDescriptions) + go grpc.OnMonitoredUserAgentMatch(threadID, server, userAgentMonitoredDescriptions) } if userAgentBlocked, userAgentBlockedDescriptions := utils.IsUserAgentBlocked(server, userAgent); userAgentBlocked { - log.Infof("User Agent \"%s\" found in blocked lists: %v!", userAgent, userAgentBlockedDescriptions) - go grpc.OnMonitoredUserAgentMatch(server, userAgentBlockedDescriptions) + log.Infof(inst, "User Agent \"%s\" found in blocked lists: %v!", userAgent, userAgentBlockedDescriptions) + go grpc.OnMonitoredUserAgentMatch(threadID, server, userAgentBlockedDescriptions) description := "unknown" if len(userAgentBlockedDescriptions) > 0 { diff --git a/lib/request-processor/handle_path_traversal.go b/lib/request-processor/handle_path_traversal.go index d56728f4f..aea107ec2 100644 --- a/lib/request-processor/handle_path_traversal.go +++ b/lib/request-processor/handle_path_traversal.go @@ -9,28 +9,28 @@ import ( ) func OnPrePathAccessed(inst *instance.RequestProcessorInstance) string { - filename := context.GetFilename() - filename2 := context.GetFilename2() - operation := context.GetFunctionName() + filename := context.GetFilename(inst) + filename2 := context.GetFilename2(inst) + operation := context.GetFunctionName(inst) if filename == "" || operation == "" { return "" } - if context.IsEndpointProtectionTurnedOff() { - log.Infof("Protection is turned off -> will not run detection logic!") + if context.IsEndpointProtectionTurnedOff(inst) { + log.Infof(inst, "Protection is turned off -> will not run detection logic!") return "" } - res := path_traversal.CheckContextForPathTraversal(filename, operation, true) + res := path_traversal.CheckContextForPathTraversal(inst, filename, operation, true) if res != nil { - return attack.ReportAttackDetected(res) + 
return attack.ReportAttackDetected(res, inst) } if filename2 != "" { - res = path_traversal.CheckContextForPathTraversal(filename2, operation, true) + res = path_traversal.CheckContextForPathTraversal(inst, filename2, operation, true) if res != nil { - return attack.ReportAttackDetected(res) + return attack.ReportAttackDetected(res, inst) } } return "" diff --git a/lib/request-processor/handle_rate_limit_group_event.go b/lib/request-processor/handle_rate_limit_group_event.go index 2527b9abe..56c88ed13 100644 --- a/lib/request-processor/handle_rate_limit_group_event.go +++ b/lib/request-processor/handle_rate_limit_group_event.go @@ -7,8 +7,8 @@ import ( ) func OnRateLimitGroupEvent(inst *instance.RequestProcessorInstance) string { - context.ContextSetRateLimitGroup() - group := context.GetRateLimitGroup() - log.Infof("Got rate limit group: %s", group) + context.ContextSetRateLimitGroup(inst) + group := context.GetRateLimitGroup(inst) + log.Infof(inst, "Got rate limit group: %s", group) return "" } diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index 564e78c5a..763ebd041 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -12,7 +12,7 @@ import ( ) func OnPreRequest(inst *instance.RequestProcessorInstance) string { - context.Clear() + context.Clear(inst) return "" } @@ -21,7 +21,7 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { return } - log.Info("[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) + log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) // Only detect web scanner activity for non-bypassed IPs if !params.IsIpBypassed { params.IsWebScanner = webscanner.IsWebScanner(params.Method, params.Route, params.QueryParsed) @@ -31,33 +31,29 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { return } - log.Info("[RSHUTDOWN] Got API spec: ", params.APISpec) + log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got API spec: ", params.APISpec) grpc.OnRequestShutdown(params) } func OnPostRequest(inst *instance.RequestProcessorInstance) string { - server := inst.GetCurrentServer() - if server == nil { - return "" - } - params := RequestShutdownParams{ - Server: server, - Method: context.GetMethod(), - Route: context.GetRoute(), - RouteParsed: context.GetParsedRoute(), - StatusCode: context.GetStatusCode(), - User: context.GetUserId(), - UserAgent: context.GetUserAgent(), - IP: context.GetIp(), - RateLimitGroup: context.GetRateLimitGroup(), - RateLimited: context.IsEndpointRateLimited(), - QueryParsed: context.GetQueryParsed(), - IsIpBypassed: context.IsIpBypassed(), - APISpec: api_discovery.GetApiInfo(server), // Also needs context, must be called before Clear() + ThreadID: inst.GetThreadID(), + Token: inst.GetCurrentToken(), + Method: context.GetMethod(inst), + Route: context.GetRoute(inst), + RouteParsed: context.GetParsedRoute(inst), + StatusCode: context.GetStatusCode(inst), + User: context.GetUserId(inst), + UserAgent: context.GetUserAgent(inst), + IP: context.GetIp(inst), + RateLimitGroup: context.GetRateLimitGroup(inst), + RateLimited: context.IsEndpointRateLimited(inst), + QueryParsed: context.GetQueryParsed(inst), + IsIpBypassed: context.IsIpBypassed(inst), + APISpec: api_discovery.GetApiInfo(inst, inst.GetCurrentServer()), } - context.Clear() + context.Clear(inst) go func() { 
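// A minimal standalone sketch of the pattern OnPostRequest uses above: every
// value the background reporter needs (including the cached thread ID) is
// copied into a plain params struct before context.Clear(inst) runs and before
// the goroutine starts, so the reporter never reads per-request state that may
// already belong to the next request on the same thread. Type and field names
// here are illustrative, not the extension's actual API.
package sketch

import "fmt"

type shutdownParams struct { // plain copies only, no pointers into request state
	ThreadID   uint64
	Method     string
	Route      string
	StatusCode int
}

type requestState struct {
	threadID   uint64
	method     string
	route      string
	statusCode int
}

func onPostRequest(state *requestState) {
	params := shutdownParams{ // snapshot while the request context is still valid
		ThreadID:   state.threadID,
		Method:     state.method,
		Route:      state.route,
		StatusCode: state.statusCode,
	}
	*state = requestState{} // equivalent of context.Clear(inst)
	go func() {             // the goroutine only sees the immutable copy
		fmt.Printf("[tid:%d] %s %s -> %d\n", params.ThreadID, params.Method, params.Route, params.StatusCode)
	}()
}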
OnRequestShutdownReporting(params) diff --git a/lib/request-processor/handle_shell_execution.go b/lib/request-processor/handle_shell_execution.go index 7e2286f3f..58a2c741b 100644 --- a/lib/request-processor/handle_shell_execution.go +++ b/lib/request-processor/handle_shell_execution.go @@ -9,22 +9,22 @@ import ( ) func OnPreShellExecuted(inst *instance.RequestProcessorInstance) string { - cmd := context.GetCmd() - operation := context.GetFunctionName() + cmd := context.GetCmd(inst) + operation := context.GetFunctionName(inst) if cmd == "" { return "" } - log.Info("Got shell command: ", cmd) + log.Info(inst, "Got shell command: ", cmd) - if context.IsEndpointProtectionTurnedOff() { - log.Infof("Protection is turned off -> will not run detection logic!") + if context.IsEndpointProtectionTurnedOff(inst) { + log.Infof(inst, "Protection is turned off -> will not run detection logic!") return "" } - res := shell_injection.CheckContextForShellInjection(cmd, operation) + res := shell_injection.CheckContextForShellInjection(inst, cmd, operation) if res != nil { - return attack.ReportAttackDetected(res) + return attack.ReportAttackDetected(res, inst) } return "" } diff --git a/lib/request-processor/handle_sql_queries.go b/lib/request-processor/handle_sql_queries.go index babb3c70d..45dc2cc75 100644 --- a/lib/request-processor/handle_sql_queries.go +++ b/lib/request-processor/handle_sql_queries.go @@ -9,21 +9,21 @@ import ( ) func OnPreSqlQueryExecuted(inst *instance.RequestProcessorInstance) string { - query := context.GetSqlQuery() - dialect := context.GetSqlDialect() - operation := context.GetFunctionName() + query := context.GetSqlQuery(inst) + dialect := context.GetSqlDialect(inst) + operation := context.GetFunctionName(inst) if query == "" || dialect == "" { return "" } - if context.IsEndpointProtectionTurnedOff() { - log.Infof("Protection is turned off -> will not run detection logic!") + if context.IsEndpointProtectionTurnedOff(inst) { + log.Infof(inst, "Protection is turned off -> will not run detection logic!") return "" } - res := sql_injection.CheckContextForSqlInjection(query, operation, dialect) + res := sql_injection.CheckContextForSqlInjection(inst, query, operation, dialect) if res != nil { - return attack.ReportAttackDetected(res) + return attack.ReportAttackDetected(res, inst) } return "" } diff --git a/lib/request-processor/handle_urls.go b/lib/request-processor/handle_urls.go index f0acdd3a7..8e97c49b0 100644 --- a/lib/request-processor/handle_urls.go +++ b/lib/request-processor/handle_urls.go @@ -20,20 +20,20 @@ All these checks first verify if the hostname was provided via user input. Protects both curl and fopen wrapper functions (file_get_contents, etc...). 
*/ func OnPreOutgoingRequest(inst *instance.RequestProcessorInstance) string { - if context.IsEndpointProtectionTurnedOff() { - log.Infof("Protection is turned off -> will not run detection logic!") + if context.IsEndpointProtectionTurnedOff(inst) { + log.Infof(inst, "Protection is turned off -> will not run detection logic!") return "" } - hostname, port := context.GetOutgoingRequestHostnameAndPort() - operation := context.GetFunctionName() + hostname, port := context.GetOutgoingRequestHostnameAndPort(inst) + operation := context.GetFunctionName(inst) - res := ssrf.CheckContextForSSRF(hostname, port, operation) + res := ssrf.CheckContextForSSRF(inst, hostname, port, operation) if res != nil { - return attack.ReportAttackDetected(res) + return attack.ReportAttackDetected(res, inst) } - log.Info("[BEFORE] Got domain: ", hostname) + log.Info(inst, "[BEFORE] Got domain: ", hostname) //TODO: check if domain is blacklisted return "" } @@ -55,49 +55,50 @@ All these checks first verify if the hostname was provided via user input. Protects curl. */ func OnPostOutgoingRequest(inst *instance.RequestProcessorInstance) string { - defer context.ResetEventContext() + defer context.ResetEventContext(inst) - hostname, port := context.GetOutgoingRequestHostnameAndPort() - effectiveHostname, effectivePort := context.GetOutgoingRequestEffectiveHostnameAndPort() - resolvedIp := context.GetOutgoingRequestResolvedIp() + hostname, port := context.GetOutgoingRequestHostnameAndPort(inst) + effectiveHostname, effectivePort := context.GetOutgoingRequestEffectiveHostnameAndPort(inst) + resolvedIp := context.GetOutgoingRequestResolvedIp(inst) if hostname == "" { return "" } - log.Info("[AFTER] Got domain: ", hostname, " port: ", port) + log.Info(inst, "[AFTER] Got domain: ", hostname, " port: ", port) server := inst.GetCurrentServer() if server != nil { - go grpc.OnDomain(server, hostname, port) + threadID := inst.GetThreadID() + go grpc.OnDomain(threadID, server, hostname, port) if effectiveHostname != hostname { - go grpc.OnDomain(server, effectiveHostname, effectivePort) + go grpc.OnDomain(threadID, server, effectiveHostname, effectivePort) } } - if context.IsEndpointProtectionTurnedOff() { - log.Infof("Protection is turned off -> will not run detection logic!") + if context.IsEndpointProtectionTurnedOff(inst) { + log.Infof(inst, "Protection is turned off -> will not run detection logic!") return "" } - if ssrf.IsRequestToItself(effectiveHostname, effectivePort) { - log.Infof("Request to itself detected -> will not run detection logic!") + if ssrf.IsRequestToItself(inst, effectiveHostname, effectivePort) { + log.Infof(inst, "Request to itself detected -> will not run detection logic!") return "" } - res := ssrf.CheckResolvedIpForSSRF(resolvedIp) + res := ssrf.CheckResolvedIpForSSRF(inst, resolvedIp) if effectiveHostname != hostname { - log.Infof("EffectiveHostname \"%s\" is different than Hostname \"%s\"!", effectiveHostname, hostname) + log.Infof(inst, "EffectiveHostname \"%s\" is different than Hostname \"%s\"!", effectiveHostname, hostname) // After the request was made, the effective hostname is different that the initially requested one (redirects) if res == nil { // We double check here for SSRF on the effective hostname because some sinks might not provide the resolved IP address - res = ssrf.CheckEffectiveHostnameForSSRF(effectiveHostname) + res = ssrf.CheckEffectiveHostnameForSSRF(inst, effectiveHostname) } } if res != nil { /* Throw exception to PHP layer if blocking is enabled -> Response content is not 
returned to the PHP code */ - return attack.ReportAttackDetected(res) + return attack.ReportAttackDetected(res, inst) } return "" } diff --git a/lib/request-processor/handle_user_event.go b/lib/request-processor/handle_user_event.go index 55326e098..e68f28c11 100644 --- a/lib/request-processor/handle_user_event.go +++ b/lib/request-processor/handle_user_event.go @@ -8,11 +8,11 @@ import ( ) func OnUserEvent(inst *instance.RequestProcessorInstance) string { - id := context.GetUserId() - username := context.GetUserName() - ip := context.GetIp() + id := context.GetUserId(inst) + username := context.GetUserName(inst) + ip := context.GetIp(inst) - log.Infof("Got user event!") + log.Infof(inst, "Got user event!") if id == "" || ip == "" { return "" @@ -22,6 +22,7 @@ func OnUserEvent(inst *instance.RequestProcessorInstance) string { if server == nil { return "" } - go grpc.OnUserEvent(server, id, username, ip) + threadID := inst.GetThreadID() // Capture threadID before goroutine + go grpc.OnUserEvent(threadID, server, id, username, ip) return "" } diff --git a/lib/request-processor/helpers/resolveHostname.go b/lib/request-processor/helpers/resolveHostname.go index ba064e284..dc9eae057 100644 --- a/lib/request-processor/helpers/resolveHostname.go +++ b/lib/request-processor/helpers/resolveHostname.go @@ -2,6 +2,7 @@ package helpers import ( "context" + "main/instance" "main/log" "net" "time" @@ -11,13 +12,13 @@ import ( This function tries to resolve the hostname to a private IP adress, if possible. It does this by calling DNS resolution from the OS (getaddrinfo for Linux). */ -func ResolveHostname(hostname string) []string { +func ResolveHostname(inst *instance.RequestProcessorInstance, hostname string) []string { ctx, cancel := context.WithTimeout(context.Background(), 100*time.Millisecond) defer cancel() resolvedIps, err := net.DefaultResolver.LookupHost(ctx, hostname) if err != nil { - log.Errorf("Failed to resolve hostname %s: %v", hostname, err) + log.Errorf(inst, "Failed to resolve hostname %s: %v", hostname, err) // If timeout is reached or the OS lookup fail, return an emtpy list of resolved IPs return []string{} } diff --git a/lib/request-processor/instance/wrapper.go b/lib/request-processor/instance/wrapper.go index 1e4198102..c92afcc5f 100644 --- a/lib/request-processor/instance/wrapper.go +++ b/lib/request-processor/instance/wrapper.go @@ -2,7 +2,6 @@ package instance import ( . "main/aikido_types" - "main/context" "sync" "unsafe" ) @@ -13,7 +12,7 @@ import ( type RequestProcessorInstance struct { CurrentToken string CurrentServer *ServerData - RequestContext context.RequestContextData + threadID uint64 // CACHED: OS thread ID cached at RINIT for fast context lookups ContextInstance unsafe.Pointer // For context callbacks ContextCallback unsafe.Pointer // C function pointer, must be per-instance in ZTS @@ -24,10 +23,9 @@ type RequestProcessorInstance struct { // NewRequestProcessorInstance creates an instance. Pass isZTS=true for FrankenPHP. 
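// The instance wrapper above keeps all request-processor state per PHP thread
// and only takes its mutex when built for ZTS (FrankenPHP), so non-threaded
// SAPIs pay no locking cost. A minimal sketch of that accessor pattern on a
// simplified struct (field names are illustrative):
package sketch

import "sync"

type instanceState struct {
	isZTS    bool
	mu       sync.Mutex
	threadID uint64 // cached OS thread ID, set once at request init
}

func (i *instanceState) SetThreadID(tid uint64) {
	if i.isZTS { // lock only under ZTS, as in wrapper.go above
		i.mu.Lock()
		defer i.mu.Unlock()
	}
	i.threadID = tid
}

func (i *instanceState) GetThreadID() uint64 {
	if i.isZTS {
		i.mu.Lock()
		defer i.mu.Unlock()
	}
	return i.threadID
}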
func NewRequestProcessorInstance(isZTS bool) *RequestProcessorInstance { return &RequestProcessorInstance{ - CurrentToken: "", - CurrentServer: nil, - RequestContext: context.RequestContextData{}, - isZTS: isZTS, + CurrentToken: "", + CurrentServer: nil, + isZTS: isZTS, } } @@ -63,46 +61,46 @@ func (i *RequestProcessorInstance) GetCurrentToken() string { return i.CurrentToken } -func (i *RequestProcessorInstance) SetRequestContext(ctx context.RequestContextData) { +func (i *RequestProcessorInstance) IsInitialized() bool { if i.isZTS { i.mu.Lock() defer i.mu.Unlock() } - i.RequestContext = ctx + return i.CurrentServer != nil +} + +func (i *RequestProcessorInstance) IsZTS() bool { + return i.isZTS } -func (i *RequestProcessorInstance) GetRequestContext() *context.RequestContextData { +func (i *RequestProcessorInstance) SetContextCallback(callback unsafe.Pointer) { if i.isZTS { i.mu.Lock() defer i.mu.Unlock() } - return &i.RequestContext + i.ContextCallback = callback } -func (i *RequestProcessorInstance) IsInitialized() bool { +func (i *RequestProcessorInstance) GetContextCallback() unsafe.Pointer { if i.isZTS { i.mu.Lock() defer i.mu.Unlock() } - return i.CurrentServer != nil -} - -func (i *RequestProcessorInstance) IsZTS() bool { - return i.isZTS + return i.ContextCallback } -func (i *RequestProcessorInstance) SetContextCallback(callback unsafe.Pointer) { +func (i *RequestProcessorInstance) SetThreadID(tid uint64) { if i.isZTS { i.mu.Lock() defer i.mu.Unlock() } - i.ContextCallback = callback + i.threadID = tid } -func (i *RequestProcessorInstance) GetContextCallback() unsafe.Pointer { +func (i *RequestProcessorInstance) GetThreadID() uint64 { if i.isZTS { i.mu.Lock() defer i.mu.Unlock() } - return i.ContextCallback + return i.threadID } diff --git a/lib/request-processor/log/log.go b/lib/request-processor/log/log.go index 019bd06b0..a2110538b 100644 --- a/lib/request-processor/log/log.go +++ b/lib/request-processor/log/log.go @@ -3,43 +3,25 @@ package log import ( "errors" "fmt" - "log" + "main/globals" + "main/instance" "os" - "sync" "time" ) -type LogLevel int - -const ( - DebugLevel LogLevel = iota - InfoLevel - WarnLevel - ErrorLevel -) - -var ( - currentLogLevel = ErrorLevel - Logger = log.New(os.Stdout, "", 0) - cliLogging = true - logFilePath = "" - logMutex sync.RWMutex -) -var LogFile *os.File - type AikidoFormatter struct{} -func (f *AikidoFormatter) Format(level LogLevel, message string) string { +func (f *AikidoFormatter) Format(level globals.LogLevel, threadID uint64, message string) string { var levelStr string switch level { - case DebugLevel: + case globals.LogDebugLevel: levelStr = "DEBUG" - case InfoLevel: + case globals.LogInfoLevel: levelStr = "INFO" - case WarnLevel: + case globals.LogWarnLevel: levelStr = "WARN" - case ErrorLevel: + case globals.LogErrorLevel: levelStr = "ERROR" default: return "invalid log level" @@ -49,137 +31,206 @@ func (f *AikidoFormatter) Format(level LogLevel, message string) string { message = message[:1024] + "... 
[truncated]" } - logMutex.RLock() - isCliLogging := cliLogging - logMutex.RUnlock() + globals.LogMutex.RLock() + isCliLogging := globals.CliLogging + globals.LogMutex.RUnlock() if isCliLogging { - return fmt.Sprintf("[AIKIDO][%s] %s\n", levelStr, message) + return fmt.Sprintf("[AIKIDO][%s][tid:%d] %s\n", levelStr, threadID, message) } - return fmt.Sprintf("[AIKIDO][%s][%s] %s\n", levelStr, time.Now().Format("15:04:05"), message) + return fmt.Sprintf("[AIKIDO][%s][tid:%d][%s] %s\n", levelStr, threadID, time.Now().Format("15:04:05"), message) } func initLogFile() { - logMutex.Lock() - defer logMutex.Unlock() + globals.LogMutex.Lock() + defer globals.LogMutex.Unlock() - if cliLogging { + if globals.CliLogging { return } - if LogFile != nil { + if globals.LogFile != nil { return } var err error - LogFile, err = os.OpenFile(logFilePath, os.O_CREATE|os.O_WRONLY, 0666) + globals.LogFile, err = os.OpenFile(globals.LogFilePath, os.O_CREATE|os.O_WRONLY, 0666) if err != nil { return } - Logger.SetOutput(LogFile) + globals.Logger.SetOutput(globals.LogFile) +} + +func logMessage(inst *instance.RequestProcessorInstance, level globals.LogLevel, args ...interface{}) { + globals.LogMutex.RLock() + lvl := globals.CurrentLogLevel + globals.LogMutex.RUnlock() + + if level >= lvl { + initLogFile() + formatter := &AikidoFormatter{} + message := fmt.Sprint(args...) + threadID := uint64(0) + if inst != nil { + threadID = inst.GetThreadID() + } + formattedMessage := formatter.Format(level, threadID, message) + globals.Logger.Print(formattedMessage) + } +} + +func logMessagef(inst *instance.RequestProcessorInstance, level globals.LogLevel, format string, args ...interface{}) { + globals.LogMutex.RLock() + lvl := globals.CurrentLogLevel + globals.LogMutex.RUnlock() + + if level >= lvl { + initLogFile() + formatter := &AikidoFormatter{} + message := fmt.Sprintf(format, args...) + threadID := uint64(0) + if inst != nil { + threadID = inst.GetThreadID() + } + formattedMessage := formatter.Format(level, threadID, message) + globals.Logger.Print(formattedMessage) + } +} + +func Debug(inst *instance.RequestProcessorInstance, args ...interface{}) { + logMessage(inst, globals.LogDebugLevel, args...) +} + +func Info(inst *instance.RequestProcessorInstance, args ...interface{}) { + logMessage(inst, globals.LogInfoLevel, args...) +} + +func Warn(inst *instance.RequestProcessorInstance, args ...interface{}) { + logMessage(inst, globals.LogWarnLevel, args...) +} + +func Error(inst *instance.RequestProcessorInstance, args ...interface{}) { + logMessage(inst, globals.LogErrorLevel, args...) +} + +func Debugf(inst *instance.RequestProcessorInstance, format string, args ...interface{}) { + logMessagef(inst, globals.LogDebugLevel, format, args...) +} + +func Infof(inst *instance.RequestProcessorInstance, format string, args ...interface{}) { + logMessagef(inst, globals.LogInfoLevel, format, args...) +} + +func Warnf(inst *instance.RequestProcessorInstance, format string, args ...interface{}) { + logMessagef(inst, globals.LogWarnLevel, format, args...) +} + +func Errorf(inst *instance.RequestProcessorInstance, format string, args ...interface{}) { + logMessagef(inst, globals.LogErrorLevel, format, args...) 
} -func logMessage(level LogLevel, args ...interface{}) { - logMutex.RLock() - lvl := currentLogLevel - logMutex.RUnlock() +// Direct threadID logging (for goroutines where inst cannot be safely passed) +func logMessageWithThreadID(threadID uint64, level globals.LogLevel, args ...interface{}) { + globals.LogMutex.RLock() + lvl := globals.CurrentLogLevel + globals.LogMutex.RUnlock() if level >= lvl { initLogFile() formatter := &AikidoFormatter{} message := fmt.Sprint(args...) - formattedMessage := formatter.Format(level, message) - Logger.Print(formattedMessage) + formattedMessage := formatter.Format(level, threadID, message) + globals.Logger.Print(formattedMessage) } } -func logMessagef(level LogLevel, format string, args ...interface{}) { - logMutex.RLock() - lvl := currentLogLevel - logMutex.RUnlock() +func logMessagefWithThreadID(threadID uint64, level globals.LogLevel, format string, args ...interface{}) { + globals.LogMutex.RLock() + lvl := globals.CurrentLogLevel + globals.LogMutex.RUnlock() if level >= lvl { initLogFile() formatter := &AikidoFormatter{} message := fmt.Sprintf(format, args...) - formattedMessage := formatter.Format(level, message) - Logger.Print(formattedMessage) + formattedMessage := formatter.Format(level, threadID, message) + globals.Logger.Print(formattedMessage) } } -func Debug(args ...interface{}) { - logMessage(DebugLevel, args...) +func DebugWithThreadID(threadID uint64, args ...interface{}) { + logMessageWithThreadID(threadID, globals.LogDebugLevel, args...) } -func Info(args ...interface{}) { - logMessage(InfoLevel, args...) +func InfoWithThreadID(threadID uint64, args ...interface{}) { + logMessageWithThreadID(threadID, globals.LogInfoLevel, args...) } -func Warn(args ...interface{}) { - logMessage(WarnLevel, args...) +func WarnWithThreadID(threadID uint64, args ...interface{}) { + logMessageWithThreadID(threadID, globals.LogWarnLevel, args...) } -func Error(args ...interface{}) { - logMessage(ErrorLevel, args...) +func ErrorWithThreadID(threadID uint64, args ...interface{}) { + logMessageWithThreadID(threadID, globals.LogErrorLevel, args...) } -func Debugf(format string, args ...interface{}) { - logMessagef(DebugLevel, format, args...) +func DebugfWithThreadID(threadID uint64, format string, args ...interface{}) { + logMessagefWithThreadID(threadID, globals.LogDebugLevel, format, args...) } -func Infof(format string, args ...interface{}) { - logMessagef(InfoLevel, format, args...) +func InfofWithThreadID(threadID uint64, format string, args ...interface{}) { + logMessagefWithThreadID(threadID, globals.LogInfoLevel, format, args...) } -func Warnf(format string, args ...interface{}) { - logMessagef(WarnLevel, format, args...) +func WarnfWithThreadID(threadID uint64, format string, args ...interface{}) { + logMessagefWithThreadID(threadID, globals.LogWarnLevel, format, args...) } -func Errorf(format string, args ...interface{}) { - logMessagef(ErrorLevel, format, args...) +func ErrorfWithThreadID(threadID uint64, format string, args ...interface{}) { + logMessagefWithThreadID(threadID, globals.LogErrorLevel, format, args...) 
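// The *WithThreadID variants above exist for code that runs on a goroutine,
// where the instance pointer should not be captured: the caller reads the
// thread id first and only the plain uint64 crosses the goroutine boundary.
// A usage sketch with a stand-in for log.InfofWithThreadID:
package sketch

import "fmt"

func infofWithThreadID(tid uint64, format string, args ...interface{}) {
	fmt.Printf("[AIKIDO][INFO][tid:%d] "+format+"\n", append([]interface{}{tid}, args...)...)
}

type inst struct{ threadID uint64 }

func (i *inst) GetThreadID() uint64 { return i.threadID }

func reportDomain(i *inst, hostname string, port uint32) {
	threadID := i.GetThreadID() // capture before the goroutine starts
	go func() {
		infofWithThreadID(threadID, "reporting domain %s:%d", hostname, port)
	}()
}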
} // SetLogLevel changes the current log level (thread-safe) func SetLogLevel(level string) error { - var newLevel LogLevel + var newLevel globals.LogLevel switch level { case "DEBUG": - newLevel = DebugLevel + newLevel = globals.LogDebugLevel case "INFO": - newLevel = InfoLevel + newLevel = globals.LogInfoLevel case "WARN": - newLevel = WarnLevel + newLevel = globals.LogWarnLevel case "ERROR": - newLevel = ErrorLevel + newLevel = globals.LogErrorLevel default: return errors.New("invalid log level") } - logMutex.Lock() - defer logMutex.Unlock() - currentLogLevel = newLevel + globals.LogMutex.Lock() + defer globals.LogMutex.Unlock() + globals.CurrentLogLevel = newLevel return nil } func Init(diskLogs bool) { - logMutex.Lock() - defer logMutex.Unlock() + globals.LogMutex.Lock() + defer globals.LogMutex.Unlock() if !diskLogs { - cliLogging = true + globals.CliLogging = true return } - cliLogging = false + globals.CliLogging = false currentTime := time.Now() timeStr := currentTime.Format("20060102150405") - logFilePath = fmt.Sprintf("/var/log/aikido-"+globals.Version+"/aikido-request-processor-%s-%d.log", timeStr, os.Getpid()) + globals.LogFilePath = fmt.Sprintf("/var/log/aikido-"+globals.Version+"/aikido-request-processor-%s-%d.log", timeStr, os.Getpid()) } func Uninit() { - logMutex.Lock() - defer logMutex.Unlock() + globals.LogMutex.Lock() + defer globals.LogMutex.Unlock() - if LogFile != nil { - LogFile.Close() - LogFile = nil + if globals.LogFile != nil { + globals.LogFile.Close() + globals.LogFile = nil } } diff --git a/lib/request-processor/main.go b/lib/request-processor/main.go index dc04a8118..9899d42f5 100644 --- a/lib/request-processor/main.go +++ b/lib/request-processor/main.go @@ -51,28 +51,28 @@ func DestroyInstance(threadID uint64) { //export RequestProcessorInit func RequestProcessorInit(instancePtr unsafe.Pointer, initJson string) (initOk bool) { + inst := instance.GetInstance(instancePtr) defer func() { if r := recover(); r != nil { - log.Warn("Recovered from panic:", r) + log.Warn(inst, "Recovered from panic:", r) initOk = false } }() - inst := instance.GetInstance(instancePtr) if inst == nil { return false } config.Init(inst, initJson) - log.Debugf("Aikido Request Processor v%s (server PID: %d, request processor PID: %d) started in \"%s\" mode!", + log.Debugf(inst, "Aikido Request Processor v%s (server PID: %d, request processor PID: %d) started in \"%s\" mode!", globals.Version, globals.EnvironmentConfig.ServerPID, globals.EnvironmentConfig.RequestProcessorPID, globals.EnvironmentConfig.PlatformName, ) - log.Debugf("Init data: %s", initJson) - log.Debugf("Started with token: \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(inst.GetCurrentToken())) + log.Debugf(inst, "Init data: %s", initJson) + log.Debugf(inst, "Started with token: \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(inst.GetCurrentToken())) if globals.EnvironmentConfig.PlatformName != "cli" { grpc.Init() @@ -83,17 +83,13 @@ func RequestProcessorInit(instancePtr unsafe.Pointer, initJson string) (initOk b grpc.StartCloudConfigRoutine() } if !zen_internals.Init() { - log.Error("Error initializing zen-internals library!") + log.Error(inst, "Error initializing zen-internals library!") return false } return true } -func GoContextCallback(contextId int) string { - // Get the instance from the context package - // This works because context.Init stores the instance pointer - instPtr := context.GetInstancePtr() - inst := instance.GetInstance(instPtr) +func GoContextCallback(inst *instance.RequestProcessorInstance, contextId 
int) string { if inst == nil { return "" } @@ -123,14 +119,14 @@ func GoContextCallback(contextId int) string { //export RequestProcessorContextInit func RequestProcessorContextInit(instancePtr unsafe.Pointer, contextCallback C.ContextCallback) (initOk bool) { + inst := instance.GetInstance(instancePtr) defer func() { if r := recover(); r != nil { - log.Warn("Recovered from panic:", r) + log.Warn(inst, "Recovered from panic:", r) initOk = false } }() - inst := instance.GetInstance(instancePtr) if inst == nil { return false } @@ -144,20 +140,19 @@ func RequestProcessorContextInit(instancePtr unsafe.Pointer, contextCallback C.C */ //export RequestProcessorConfigUpdate func RequestProcessorConfigUpdate(instancePtr unsafe.Pointer, configJson string) (initOk bool) { + inst := instance.GetInstance(instancePtr) defer func() { if r := recover(); r != nil { - log.Warn("Recovered from panic:", r) + log.Warn(inst, "Recovered from panic:", r) initOk = false } }() - inst := instance.GetInstance(instancePtr) - if inst == nil { return false } - log.Debugf("Reloading Aikido config...") + log.Debugf(inst, "Reloading Aikido config...") conf := AikidoConfigData{} reloadResult := config.ReloadAikidoConfig(inst, &conf, configJson) @@ -184,14 +179,14 @@ func RequestProcessorConfigUpdate(instancePtr unsafe.Pointer, configJson string) //export RequestProcessorOnEvent func RequestProcessorOnEvent(instancePtr unsafe.Pointer, eventId int) (outputJson *C.char) { + inst := instance.GetInstance(instancePtr) defer func() { if r := recover(); r != nil { - log.Warn("Recovered from panic:", r) + log.Warn(inst, "Recovered from panic:", r) outputJson = nil } }() - inst := instance.GetInstance(instancePtr) if inst == nil { return nil } @@ -235,19 +230,21 @@ func RequestProcessorReportStats(instancePtr unsafe.Pointer, sink, kind string, clonedTimings := make([]int64, len(timings)) copy(clonedTimings, timings) - go grpc.OnMonitoredSinkStats(inst.GetCurrentServer(), strings.Clone(sink), strings.Clone(kind), attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total, clonedTimings) + threadID := inst.GetThreadID() + go grpc.OnMonitoredSinkStats(threadID, inst.GetCurrentServer(), strings.Clone(sink), strings.Clone(kind), attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total, clonedTimings) } //export RequestProcessorUninit func RequestProcessorUninit(instancePtr unsafe.Pointer) { - log.Debug("Uninit: {}") + inst := instance.GetInstance(instancePtr) + log.Debug(inst, "Uninit: {}") zen_internals.Uninit() if globals.EnvironmentConfig.PlatformName != "cli" { grpc.Uninit() } - log.Debugf("Aikido Request Processor v%s stopped!", globals.Version) + log.Debugf(inst, "Aikido Request Processor v%s stopped!", globals.Version) config.Uninit() } diff --git a/lib/request-processor/utils/utils.go b/lib/request-processor/utils/utils.go index c82e99256..23b278dba 100644 --- a/lib/request-processor/utils/utils.go +++ b/lib/request-processor/utils/utils.go @@ -3,6 +3,7 @@ package utils import ( "fmt" "main/helpers" + "main/instance" "main/log" "net" "net/netip" @@ -195,7 +196,7 @@ func isLocalhost(ip string) bool { return parsedIP.IsLoopback() } -func IsIpInSet(ipSet *netipx.IPSet, ip string) int { +func IsIpInSet(inst *instance.RequestProcessorInstance, ipSet *netipx.IPSet, ip string) int { if ipSet == nil || ipSet.Equal(&netipx.IPSet{}) { // No IPs configured in the list -> return default value return NoConfig @@ -203,7 +204,7 @@ func IsIpInSet(ipSet *netipx.IPSet, ip string) int { ipAddress, err := 
netip.ParseAddr(ip) if err != nil { - log.Infof("Invalid ip address: %s\n", ip) + log.Infof(inst, "Invalid ip address: %s\n", ip) return NoConfig } @@ -214,7 +215,7 @@ func IsIpInSet(ipSet *netipx.IPSet, ip string) int { return NotFound } -func IsIpAllowedOnEndpoint(server *ServerData, allowedIps *netipx.IPSet, ip string) int { +func IsIpAllowedOnEndpoint(inst *instance.RequestProcessorInstance, server *ServerData, allowedIps *netipx.IPSet, ip string) int { if server == nil { return NoConfig } @@ -222,17 +223,17 @@ func IsIpAllowedOnEndpoint(server *ServerData, allowedIps *netipx.IPSet, ip stri return Found } - return IsIpInSet(allowedIps, ip) + return IsIpInSet(inst, allowedIps, ip) } -func IsIpBypassed(server *ServerData, ip string) bool { +func IsIpBypassed(inst *instance.RequestProcessorInstance, server *ServerData, ip string) bool { if server == nil { return false } server.CloudConfigMutex.Lock() defer server.CloudConfigMutex.Unlock() - return IsIpInSet(server.CloudConfig.BypassedIps, ip) == Found + return IsIpInSet(inst, server.CloudConfig.BypassedIps, ip) == Found } func getIpFromXForwardedFor(value string) string { @@ -326,7 +327,7 @@ type IpListMatch struct { Description string } -func IsIpInList(ipList map[string]IpList, ip string) (int, []IpListMatch) { +func IsIpInList(inst *instance.RequestProcessorInstance, ipList map[string]IpList, ip string) (int, []IpListMatch) { if len(ipList) == 0 { return NoConfig, []IpListMatch{} } @@ -350,7 +351,7 @@ func IsIpInList(ipList map[string]IpList, ip string) (int, []IpListMatch) { return Found, matches } -func IsIpAllowed(server *ServerData, ip string) bool { +func IsIpAllowed(inst *instance.RequestProcessorInstance, server *ServerData, ip string) bool { server.CloudConfigMutex.Lock() defer server.CloudConfigMutex.Unlock() @@ -358,22 +359,22 @@ func IsIpAllowed(server *ServerData, ip string) bool { return true } - result, _ := IsIpInList(server.CloudConfig.AllowedIps, ip) + result, _ := IsIpInList(inst, server.CloudConfig.AllowedIps, ip) // IP is allowed if it's found in the allowed lists or if the allowed lists are not configured return result == Found || result == NoConfig } -func IsIpBlocked(server *ServerData, ip string) (bool, []IpListMatch) { +func IsIpBlocked(inst *instance.RequestProcessorInstance, server *ServerData, ip string) (bool, []IpListMatch) { server.CloudConfigMutex.Lock() defer server.CloudConfigMutex.Unlock() - result, matches := IsIpInList(server.CloudConfig.BlockedIps, ip) + result, matches := IsIpInList(inst, server.CloudConfig.BlockedIps, ip) return result == Found, matches } -func IsIpMonitored(server *ServerData, ip string) (bool, []IpListMatch) { +func IsIpMonitored(inst *instance.RequestProcessorInstance, server *ServerData, ip string) (bool, []IpListMatch) { server.CloudConfigMutex.Lock() defer server.CloudConfigMutex.Unlock() - result, matches := IsIpInList(server.CloudConfig.MonitoredIps, ip) + result, matches := IsIpInList(inst, server.CloudConfig.MonitoredIps, ip) return result == Found, matches } diff --git a/lib/request-processor/utils/utils_test.go b/lib/request-processor/utils/utils_test.go index 07f6a15a8..44574215e 100644 --- a/lib/request-processor/utils/utils_test.go +++ b/lib/request-processor/utils/utils_test.go @@ -428,7 +428,7 @@ func TestIsIpBlockedByPrefix(t *testing.T) { IpList, _ := BuildIpList("test", []string{"1.2.0.0/16"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "1.2.3.4" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != true { 
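// The utils helpers above match request IPs against cloud-config lists that
// are pre-built as netipx.IPSet values, so a lookup is one Contains call after
// netip.ParseAddr. A minimal sketch of building such a set from CIDR strings
// and checking an address against it (the list contents are made up):
package sketch

import (
	"net/netip"

	"go4.org/netipx"
)

func buildIPSet(cidrs []string) (*netipx.IPSet, error) {
	var b netipx.IPSetBuilder
	for _, c := range cidrs {
		p, err := netip.ParsePrefix(c)
		if err != nil {
			return nil, err
		}
		b.AddPrefix(p)
	}
	return b.IPSet()
}

func isBlocked(set *netipx.IPSet, ip string) bool {
	addr, err := netip.ParseAddr(ip)
	if err != nil {
		return false // invalid input counts as "no match", as in IsIpInSet above
	}
	return set.Contains(addr)
}

// buildIPSet([]string{"1.2.0.0/16", "2001:db8::/32"}) followed by
// isBlocked(set, "1.2.3.4") returns true, matching what the IsIpBlocked tests
// above expect.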
t.Errorf("expected true, got %v", result) } @@ -440,7 +440,7 @@ func TestIsIpBlockedByIp(t *testing.T) { IpList, _ := BuildIpList("test", []string{"1.2.3.4"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "1.2.3.4" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != true { t.Errorf("expected true, got %v", result) } @@ -452,7 +452,7 @@ func TestIsIpNotBlockedByPrefix(t *testing.T) { IpList, _ := BuildIpList("test", []string{"1.2.0.0/16"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "2.3.4.5" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != false { t.Errorf("expected false, got %v", result) } @@ -464,7 +464,7 @@ func TestIsIpNotBlockedByIp(t *testing.T) { IpList, _ := BuildIpList("test", []string{"1.2.3.4"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "2.3.4.5" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != false { t.Errorf("expected false, got %v", result) } @@ -475,7 +475,7 @@ func TestIsIpv6BlockedByPrefix(t *testing.T) { IpList, _ := BuildIpList("test", []string{"2001:db8::/32"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "2001:db8:1234:5678:90ab:cdef:1234:5678" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != true { t.Errorf("expected true, got %v", result) } @@ -487,7 +487,7 @@ func TestIsIpv6BlockedByIp(t *testing.T) { IpList, _ := BuildIpList("test", []string{"2001:db8::1"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "2001:db8::1" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != true { t.Errorf("expected true, got %v", result) } @@ -499,7 +499,7 @@ func TestIsIpv6NotBlockedByPrefix(t *testing.T) { IpList, _ := BuildIpList("test", []string{"2001:db8::/32"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "2001:db9::1" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != false { t.Errorf("expected false, got %v", result) } @@ -511,7 +511,7 @@ func TestIsIpv6NotBlockedByIp(t *testing.T) { IpList, _ := BuildIpList("test", []string{"2001:db8::1"}) server.CloudConfig.BlockedIps["test"] = *IpList ip := "2001:db8::2" - result, _ := IsIpBlocked(server, ip) + result, _ := IsIpBlocked(nil, server, ip) if result != false { t.Errorf("expected false, got %v", result) } diff --git a/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal.go b/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal.go index 22f02e310..ec830acad 100644 --- a/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal.go +++ b/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal.go @@ -3,16 +3,17 @@ package path_traversal import ( "main/context" "main/helpers" + "main/instance" "main/utils" "strings" ) -func CheckContextForPathTraversal(filename string, operation string, checkPathStart bool) *utils.InterceptorResult { +func CheckContextForPathTraversal(inst *instance.RequestProcessorInstance, filename string, operation string, checkPathStart bool) *utils.InterceptorResult { trimmedFilename := helpers.TrimInvisible(filename) sanitizedPath := SanitizePath(trimmedFilename) for _, source := range context.SOURCES { - mapss := source.CacheGet() + mapss := source.CacheGet(inst) for str, path := range mapss { trimmedInputString := helpers.TrimInvisible(str) diff --git 
a/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal_test.go b/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal_test.go index 401279f21..500d840c5 100644 --- a/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal_test.go +++ b/lib/request-processor/vulnerabilities/path-traversal/checkContextForPathTraversal_test.go @@ -9,7 +9,7 @@ import ( func TestCheckContextForPathTraversal(t *testing.T) { t.Run("it detects path traversal from body parameter", func(t *testing.T) { - context.LoadForUnitTests(map[string]string{ + inst := context.LoadForUnitTests(map[string]string{ "remoteAddress": "ip", "method": "POST", "url": "url", @@ -18,7 +18,7 @@ func TestCheckContextForPathTraversal(t *testing.T) { }) operation := "operation" - result := CheckContextForPathTraversal("../file/test.txt", operation, true) + result := CheckContextForPathTraversal(inst, "../file/test.txt", operation, true) if result == nil { t.Errorf("expected result, got nil") @@ -46,14 +46,8 @@ func TestCheckContextForPathTraversal(t *testing.T) { }) t.Run("it does not flag safe operation", func(t *testing.T) { - context.LoadForUnitTests(map[string]string{ - "remoteAddress": "ip", - "method": "POST", - "url": "url", - }) - operation := "path.normalize" - context.LoadForUnitTests(map[string]string{ + inst := context.LoadForUnitTests(map[string]string{ "url": "/_next/static/RjAvHy_jB1ciRT_xBrSyI/_ssgManifest.js", "method": "GET", "headers": context.GetJsonString(map[string]interface{}{ @@ -78,12 +72,12 @@ func TestCheckContextForPathTraversal(t *testing.T) { "x-forwarded-proto": "http", "x-forwarded-for": "127.0.0.1", }), - "source": "http.createServer", - "cookies": context.GetJsonString(map[string]interface{}{"Phpstorm-8262f4a6": "6a1925f9-2f0e-45ea-8336-a6988d56b1aa"}), - "remoteAddress": "127.0.0.1", - }) + "source": "http.createServer", + "cookies": context.GetJsonString(map[string]interface{}{"Phpstorm-8262f4a6": "6a1925f9-2f0e-45ea-8336-a6988d56b1aa"}), + "remoteAddress": "127.0.0.1", + }) - result := CheckContextForPathTraversal("../../web/spec-extension/cookies", operation, true) + result := CheckContextForPathTraversal(inst, "../../web/spec-extension/cookies", operation, true) if result != nil { t.Errorf("expected nil, got %v", result) } diff --git a/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection.go b/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection.go index 3c971cec9..03cb314a4 100644 --- a/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection.go +++ b/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection.go @@ -3,13 +3,14 @@ package shell_injection import ( "main/context" "main/helpers" + "main/instance" "main/utils" ) -func CheckContextForShellInjection(command string, operation string) *utils.InterceptorResult { +func CheckContextForShellInjection(inst *instance.RequestProcessorInstance, command string, operation string) *utils.InterceptorResult { trimmedCommand := helpers.TrimInvisible(command) for _, source := range context.SOURCES { - mapss := source.CacheGet() + mapss := source.CacheGet(inst) for str, path := range mapss { trimmedInputString := helpers.TrimInvisible(str) diff --git a/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection_test.go b/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection_test.go index cdf48fc78..8d70e25ac 
100644 --- a/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection_test.go +++ b/lib/request-processor/vulnerabilities/shell-injection/checkContextForShellInjection_test.go @@ -8,7 +8,7 @@ import ( func TestCheckContextForShellInjection(t *testing.T) { t.Run("it detects shell injection", func(t *testing.T) { - context.LoadForUnitTests(map[string]string{ + inst := context.LoadForUnitTests(map[string]string{ "remoteAddress": "ip", "method": "POST", "url": "url", @@ -19,7 +19,7 @@ func TestCheckContextForShellInjection(t *testing.T) { "route": "/", }) operation := "child_process.exec" - result := CheckContextForShellInjection("binary --domain www.example`whoami`.com", operation) + result := CheckContextForShellInjection(inst, "binary --domain www.example`whoami`.com", operation) if result == nil { t.Errorf("expected result, got nil") @@ -48,7 +48,7 @@ func TestCheckContextForShellInjection(t *testing.T) { t.Run("it detects shell injection from route params", func(t *testing.T) { operation := "child_process.exec" - context.LoadForUnitTests(map[string]string{ + inst := context.LoadForUnitTests(map[string]string{ "remoteAddress": "ip", "method": "POST", "url": "url", @@ -59,7 +59,7 @@ func TestCheckContextForShellInjection(t *testing.T) { "route": "/", }) - result := CheckContextForShellInjection("binary --domain www.example`whoami`.com", operation) + result := CheckContextForShellInjection(inst, "binary --domain www.example`whoami`.com", operation) if result == nil { t.Errorf("expected result, got nil") diff --git a/lib/request-processor/vulnerabilities/sql-injection/checkContextForSqlInjection.go b/lib/request-processor/vulnerabilities/sql-injection/checkContextForSqlInjection.go index 281a3ee03..c5aaec796 100644 --- a/lib/request-processor/vulnerabilities/sql-injection/checkContextForSqlInjection.go +++ b/lib/request-processor/vulnerabilities/sql-injection/checkContextForSqlInjection.go @@ -3,6 +3,7 @@ package sql_injection import ( "main/context" "main/helpers" + "main/instance" "main/utils" ) @@ -10,12 +11,12 @@ import ( * This function goes over all the different input types in the context and checks * if it's a possible SQL Injection, if so the function returns an InterceptorResult */ -func CheckContextForSqlInjection(sql string, operation string, dialect string) *utils.InterceptorResult { +func CheckContextForSqlInjection(inst *instance.RequestProcessorInstance, sql string, operation string, dialect string) *utils.InterceptorResult { trimmedSql := helpers.TrimInvisible(sql) dialectId := utils.GetSqlDialectFromString(dialect) for _, source := range context.SOURCES { - mapss := source.CacheGet() + mapss := source.CacheGet(inst) for str, path := range mapss { trimmedInputString := helpers.TrimInvisible(str) diff --git a/lib/request-processor/vulnerabilities/ssrf/checkContextForSSRF.go b/lib/request-processor/vulnerabilities/ssrf/checkContextForSSRF.go index 07e4f10bb..d056b8c42 100644 --- a/lib/request-processor/vulnerabilities/ssrf/checkContextForSSRF.go +++ b/lib/request-processor/vulnerabilities/ssrf/checkContextForSSRF.go @@ -3,21 +3,22 @@ package ssrf import ( "main/context" "main/helpers" + "main/instance" "main/utils" ) /* This is called before a request is made to check for SSRF and block the request (not execute it) if SSRF found */ -func CheckContextForSSRF(hostname string, port uint32, operation string) *utils.InterceptorResult { +func CheckContextForSSRF(inst *instance.RequestProcessorInstance, hostname string, port uint32, operation string) 
*utils.InterceptorResult { trimmedHostname := helpers.TrimInvisible(hostname) // Check if this is a request to the server itself (including HTTP/HTTPS special case) // If so, don't block it as it's not an SSRF attack - if IsRequestToItself(trimmedHostname, port) { + if IsRequestToItself(inst, trimmedHostname, port) { return nil } for _, source := range context.SOURCES { - mapss := source.CacheGet() + mapss := source.CacheGet(inst) for str, path := range mapss { trimmedInputString := helpers.TrimInvisible(str) @@ -37,7 +38,7 @@ func CheckContextForSSRF(hostname string, port uint32, operation string) *utils. return &interceptorResult } - resolvedIpStatus := getResolvedIpStatusForHostname(trimmedHostname) + resolvedIpStatus := getResolvedIpStatusForHostname(inst, trimmedHostname) if resolvedIpStatus != nil { interceptorResult.Metadata["resolvedIp"] = resolvedIpStatus.ip if resolvedIpStatus.isIMDS { @@ -51,10 +52,10 @@ func CheckContextForSSRF(hostname string, port uint32, operation string) *utils. return &interceptorResult } - // Hostname matched in the user input but we did not managed to determine if it's a SSRF attack at this point. - // Storing the matching information (interceptor result) in order to use it once the request completes, - // as at that point we might have more information to determine if SSRF or not. - context.EventContextSetCurrentSsrfInterceptorResult(&interceptorResult) + // Hostname matched in the user input but we did not managed to determine if it's a SSRF attack at this point. + // Storing the matching information (interceptor result) in order to use it once the request completes, + // as at that point we might have more information to determine if SSRF or not. + context.EventContextSetCurrentSsrfInterceptorResult(inst, &interceptorResult) } } } @@ -62,15 +63,15 @@ func CheckContextForSSRF(hostname string, port uint32, operation string) *utils. 
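// CheckContextForSSRF above defers part of its decision to a DNS lookup done
// by the extension itself (getResolvedIpStatusForHostname, defined further
// down). A minimal sketch of the kind of classification such a lookup feeds,
// using net/netip helpers; the exact rule set the extension applies may
// differ:
package sketch

import "net/netip"

// resolvesToInternalAddress reports whether any resolved IP is loopback,
// private, link-local or unspecified.
func resolvesToInternalAddress(resolvedIps []string) bool {
	for _, ip := range resolvedIps {
		addr, err := netip.ParseAddr(ip)
		if err != nil {
			continue // unparsable entries are ignored in this sketch
		}
		if addr.IsLoopback() || addr.IsPrivate() || addr.IsLinkLocalUnicast() || addr.IsUnspecified() {
			return true
		}
	}
	return false
}

// resolvesToInternalAddress([]string{"10.0.0.5"})      -> true
// resolvesToInternalAddress([]string{"93.184.216.34"}) -> false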
} /* This is called after the request is made to check for SSRF in the effective hostname - hostname optained after redirects from the PHP library that made the request (curl) */ -func CheckEffectiveHostnameForSSRF(effectiveHostname string) *utils.InterceptorResult { - interceptorResult := context.GetCurrentSsrfInterceptorResult() +func CheckEffectiveHostnameForSSRF(inst *instance.RequestProcessorInstance, effectiveHostname string) *utils.InterceptorResult { + interceptorResult := context.GetCurrentSsrfInterceptorResult(inst) if interceptorResult == nil { // The initially requested hostname was not found in the user input -> no SSRF return nil } interceptorResult.Metadata["effectiveHostname"] = effectiveHostname - resolvedIpStatus := getResolvedIpStatusForHostname(effectiveHostname) + resolvedIpStatus := getResolvedIpStatusForHostname(inst, effectiveHostname) if resolvedIpStatus != nil { interceptorResult.Metadata["resolvedIp"] = resolvedIpStatus.ip if resolvedIpStatus.isIMDS { @@ -87,8 +88,8 @@ func CheckEffectiveHostnameForSSRF(effectiveHostname string) *utils.InterceptorR } /* This is called after the request is made to check for SSRF in the resolvedIP - IP optained from the PHP library that made the request (curl) */ -func CheckResolvedIpForSSRF(resolvedIp string) *utils.InterceptorResult { - interceptorResult := context.GetCurrentSsrfInterceptorResult() +func CheckResolvedIpForSSRF(inst *instance.RequestProcessorInstance, resolvedIp string) *utils.InterceptorResult { + interceptorResult := context.GetCurrentSsrfInterceptorResult(inst) if interceptorResult == nil { // The initially requested hostname was not found in the user input -> no SSRF return nil diff --git a/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus.go b/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus.go index fe98c0108..aadfa03da 100644 --- a/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus.go +++ b/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus.go @@ -1,6 +1,9 @@ package ssrf -import "main/helpers" +import ( + "main/helpers" + "main/instance" +) type ResolvedIpStatus struct { ip string @@ -15,8 +18,8 @@ we expect that for most of the cases, the result will be already cached at the O We do our own DNS resolution, because we want to actually block potential SSRF attacks and we did not find any way to hook PHP's DNS resolution calls. 
*/ -func getResolvedIpStatusForHostname(hostname string) *ResolvedIpStatus { - resolvedIps := helpers.ResolveHostname(hostname) +func getResolvedIpStatusForHostname(inst *instance.RequestProcessorInstance, hostname string) *ResolvedIpStatus { + resolvedIps := helpers.ResolveHostname(inst, hostname) imdsIP := FindIMDSIp(hostname, resolvedIps) if imdsIP != "" { diff --git a/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus_test.go b/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus_test.go index d372fd468..fe1bec06d 100644 --- a/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus_test.go +++ b/lib/request-processor/vulnerabilities/ssrf/getResolvedIpStatus_test.go @@ -14,7 +14,7 @@ func TestResolvedIpStatus(t *testing.T) { } for _, test := range tests { - result := getResolvedIpStatusForHostname(test.hostname) + result := getResolvedIpStatusForHostname(nil, test.hostname) if result == nil { t.Errorf("For hostname '%s' expected DNS resolution to not fail", test.hostname) break diff --git a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go index 7007ac56e..898632d34 100644 --- a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go +++ b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself.go @@ -3,6 +3,7 @@ package ssrf import ( "main/context" "main/helpers" + "main/instance" "net/url" ) @@ -10,16 +11,21 @@ import ( // This includes a special case for HTTP/HTTPS: if the server is running on HTTP (port 80) and makes a request // to HTTPS (port 443) of the same hostname, or vice versa, it's considered a request to itself. // This prevents false positives when a server makes requests to itself via different protocols. -func IsRequestToItself(outboundHostname string, outboundPort uint32) bool { +func IsRequestToItself(inst *instance.RequestProcessorInstance, outboundHostname string, outboundPort uint32) bool { + if inst == nil { + return false + } + + server := inst.GetCurrentServer() + // Check if trust proxy is enabled - // If not enabled, we don't consider requests to iteself as safe - server := context.GetCurrentServer() + // If not enabled, we don't consider requests to itself as safe if server != nil && !server.AikidoConfig.TrustProxy { return false } // Get the current server URL from the incoming request - serverURL := context.GetUrl() + serverURL := context.GetUrl(inst) if serverURL == "" { return false } diff --git a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go index b98bd8237..92ef614a6 100644 --- a/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go +++ b/lib/request-processor/vulnerabilities/ssrf/isRequestToItself_test.go @@ -1,59 +1,59 @@ package ssrf import ( - "main/aikido_types" + . 
"main/aikido_types" "main/context" + "main/instance" "testing" ) -func setupTestContext(serverURL string, trustProxy bool) func() { +func setupTestContext(serverURL string, trustProxy bool) (*instance.RequestProcessorInstance, func()) { // Setup a mock server with trust proxy setting - testServer := &aikido_types.ServerData{ - AikidoConfig: aikido_types.AikidoConfigData{ + testServer := &ServerData{ + AikidoConfig: AikidoConfigData{ TrustProxy: trustProxy, }, } - // Store original server and restore it later - context.SetTestServer(testServer) - - // Use the proper test context loader - context.LoadForUnitTests(map[string]string{ + // Use the proper test context loader - it returns the mock instance with threadID set + testInst := context.LoadForUnitTests(map[string]string{ "url": serverURL, }) - // Return cleanup function - return func() { + // Set the server on the instance + testInst.SetCurrentServer(testServer) + + // Return instance and cleanup function + return testInst, func() { context.UnloadForUnitTests() - context.SetTestServer(nil) } } func TestIsRequestToItself_ReturnsFalseIfHostnamesDifferent(t *testing.T) { - cleanup := setupTestContext("http://aikido.dev:4000", true) + inst, cleanup := setupTestContext("http://aikido.dev:4000", true) defer cleanup() - result := IsRequestToItself("google.com", 4000) + result := IsRequestToItself(inst, "google.com", 4000) if result != false { t.Errorf("Expected false when hostnames are different, got %v", result) } } func TestIsRequestToItself_ReturnsFalseIfHostnamesDifferentHTTPS(t *testing.T) { - cleanup := setupTestContext("https://aikido.dev", true) + inst, cleanup := setupTestContext("https://aikido.dev", true) defer cleanup() - result := IsRequestToItself("google.com", 443) + result := IsRequestToItself(inst, "google.com", 443) if result != false { t.Errorf("Expected false when hostnames are different (HTTPS), got %v", result) } } func TestIsRequestToItself_ReturnsFalseIfHostnamesDifferentWithCustomPort(t *testing.T) { - cleanup := setupTestContext("https://aikido.dev:4000", true) + inst, cleanup := setupTestContext("https://aikido.dev:4000", true) defer cleanup() - result := IsRequestToItself("google.com", 443) + result := IsRequestToItself(inst, "google.com", 443) if result != false { t.Errorf("Expected false when hostnames are different (custom port), got %v", result) } @@ -94,10 +94,10 @@ func TestIsRequestToItself_ReturnsTrueIfServerDoesRequestToItself(t *testing.T) for _, tt := range tests { t.Run(tt.description, func(t *testing.T) { - cleanup := setupTestContext(tt.serverURL, true) + inst, cleanup := setupTestContext(tt.serverURL, true) defer cleanup() - result := IsRequestToItself(tt.outboundHostname, tt.outboundPort) + result := IsRequestToItself(inst, tt.outboundHostname, tt.outboundPort) if result != true { t.Errorf("Expected true for %s, got %v", tt.description, result) } @@ -128,10 +128,10 @@ func TestIsRequestToItself_ReturnsTrueForSpecialCaseHTTPHTTPS(t *testing.T) { for _, tt := range tests { t.Run(tt.description, func(t *testing.T) { - cleanup := setupTestContext(tt.serverURL, true) + inst, cleanup := setupTestContext(tt.serverURL, true) defer cleanup() - result := IsRequestToItself(tt.outboundHostname, tt.outboundPort) + result := IsRequestToItself(inst, tt.outboundHostname, tt.outboundPort) if result != true { t.Errorf("Expected true for special case %s, got %v", tt.description, result) } @@ -162,10 +162,10 @@ func TestIsRequestToItself_ReturnsFalseIfTrustProxyIsFalse(t *testing.T) { for _, tt := range tests { 
t.Run(tt.description, func(t *testing.T) { - cleanup := setupTestContext(tt.serverURL, false) // Trust proxy is false + inst, cleanup := setupTestContext(tt.serverURL, false) // Trust proxy is false defer cleanup() - result := IsRequestToItself(tt.outboundHostname, tt.outboundPort) + result := IsRequestToItself(inst, tt.outboundHostname, tt.outboundPort) if result != false { t.Errorf("Expected false when trust proxy is disabled for %s, got %v", tt.description, result) } diff --git a/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go b/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go index 9a3eb1eff..feef27526 100644 --- a/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go +++ b/lib/request-processor/vulnerabilities/zen-internals/zen_internals.go @@ -51,7 +51,7 @@ func Init() bool { handle := C.dlopen(zenInternalsLibPath, C.RTLD_LAZY) if handle == nil { - log.Errorf("Failed to load zen-internals library from '%s' with error %s!", C.GoString(zenInternalsLibPath), C.GoString(C.dlerror())) + log.Errorf(nil, "Failed to load zen-internals library from '%s' with error %s!", C.GoString(zenInternalsLibPath), C.GoString(C.dlerror())) return false } @@ -60,7 +60,7 @@ func Init() bool { vDetectSqlInjection := C.dlsym(handle, detectSqlInjectionFnName) if vDetectSqlInjection == nil { - log.Error("Failed to load detect_sql_injection function from zen-internals library!") + log.Error(nil, "Failed to load detect_sql_injection function from zen-internals library!") C.dlclose(handle) return false } @@ -68,7 +68,7 @@ func Init() bool { zenLib.handle = handle zenLib.detectSqlInjection = (C.detect_sql_injection_func)(vDetectSqlInjection) zenLib.initialized = true - log.Debugf("Loaded zen-internals library!") + log.Debugf(nil, "Loaded zen-internals library!") return true } @@ -114,6 +114,6 @@ func DetectSQLInjection(query string, user_input string, dialect int) int { cUserInput, userInputLen, C.int(dialect))) - log.Debugf("DetectSqlInjection(\"%s\", \"%s\", %d) -> %d", query, user_input, dialect, result) + log.Debugf(nil, "DetectSqlInjection(\"%s\", \"%s\", %d) -> %d", query, user_input, dialect, result) return result } diff --git a/package/rpm/aikido.spec b/package/rpm/aikido.spec index c5748d021..b09695e0a 100644 --- a/package/rpm/aikido.spec +++ b/package/rpm/aikido.spec @@ -56,19 +56,16 @@ fi -# Check if FrankenPHP is installed -# FRANKENPHP_PHP_VERSION="" -# if command -v frankenphp -v >/dev/null 2>&1; then -# if frankenphp -v >/dev/null 2>&1; then -# FRANKENPHP_PHP_VERSION=$(frankenphp -v 2>/dev/null | grep -oP 'PHP \K\d+\.\d+' | head -n 1) -# fi -# -# if [ -n "$FRANKENPHP_PHP_VERSION" ]; then -# echo "Found FrankenPHP with embedded PHP $FRANKENPHP_PHP_VERSION" -# else -# echo "Found FrankenPHP but could not determine PHP version" -# fi -# fi +FRANKENPHP_PHP_VERSION="" +if command -v frankenphp >/dev/null 2>&1; then + FRANKENPHP_PHP_VERSION=$(frankenphp -v 2>/dev/null | grep -oP 'PHP \K\d+\.\d+' | head -n 1) + + if [ -n "$FRANKENPHP_PHP_VERSION" ]; then + echo "Found FrankenPHP with embedded PHP $FRANKENPHP_PHP_VERSION" + else + echo "Found FrankenPHP but could not determine PHP version" + fi +fi for PHP_VERSION in "${PHP_VERSIONS[@]}"; do echo "Installing for PHP $PHP_VERSION..." @@ -146,38 +143,33 @@ for PHP_VERSION in "${PHP_VERSIONS[@]}"; do fi done -# Install for FrankenPHP if installed -# if [ -n "$FRANKENPHP_PHP_VERSION" ]; then -# echo "Installing for FrankenPHP with PHP $FRANKENPHP_PHP_VERSION..." 
-# -# FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" -# FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" -# -# # Install Aikido PHP extension for FrankenPHP -# if [ -d "$FRANKENPHP_EXT_DIR" ]; then -# echo "Installing new Aikido extension in $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." -# ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so -# else -# echo "FrankenPHP extension directory $FRANKENPHP_EXT_DIR not found! Creating it..." -# mkdir -p $FRANKENPHP_EXT_DIR -# ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so -# fi -# -# # Install Aikido ini file for FrankenPHP -# if [ -d "$FRANKENPHP_INI_DIR" ]; then -# echo "Installing new Aikido mod in $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." -# ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini -# else -# echo "FrankenPHP ini directory $FRANKENPHP_INI_DIR not found! Creating it..." -# mkdir -p $FRANKENPHP_INI_DIR -# ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini -# fi -# fi - -if [ ${#PHP_VERSIONS[@]} -eq 0 ]; then -# if [ ${#PHP_VERSIONS[@]} -eq 0 ] && [ -z "$FRANKENPHP_PHP_VERSION" ]; then - echo "No PHP found! Exiting!" -# echo "No PHP or FrankenPHP found! Exiting!" +if [ -n "$FRANKENPHP_PHP_VERSION" ]; then + echo "Installing for FrankenPHP with PHP $FRANKENPHP_PHP_VERSION... ZTS (Thread Safe)" + + FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" + FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" + + if [ -d "$FRANKENPHP_EXT_DIR" ]; then + echo "Installing new Aikido extension in $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." + ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION-zts.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so + else + echo "FrankenPHP extension directory $FRANKENPHP_EXT_DIR not found! Creating it..." + mkdir -p $FRANKENPHP_EXT_DIR + ln -sf /opt/aikido-%{version}/aikido-extension-php-$FRANKENPHP_PHP_VERSION-zts.so $FRANKENPHP_EXT_DIR/aikido-%{version}.so + fi + + if [ -d "$FRANKENPHP_INI_DIR" ]; then + echo "Installing new Aikido mod in $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." + ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini + else + echo "FrankenPHP ini directory $FRANKENPHP_INI_DIR not found! Creating it..." + mkdir -p $FRANKENPHP_INI_DIR + ln -sf /opt/aikido-%{version}/aikido.ini $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini + fi +fi + +if [ ${#PHP_VERSIONS[@]} -eq 0 ] && [ -z "$FRANKENPHP_PHP_VERSION" ]; then + echo "No PHP or FrankenPHP found! Exiting!" exit 1 fi @@ -217,13 +209,8 @@ done echo "Found PHP versions: ${PHP_VERSIONS[*]}" -# Check if FrankenPHP directories exist for uninstall -# FRANKENPHP_INSTALLED=false -# FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" -# FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" -# if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then -# FRANKENPHP_INSTALLED=true -# fi +FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" +FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" for PHP_VERSION in "${PHP_VERSIONS[@]}"; do echo "Uninstalling for PHP $PHP_VERSION..." @@ -279,22 +266,19 @@ for PHP_VERSION in "${PHP_VERSIONS[@]}"; do fi done -# Uninstall for FrankenPHP if directories exist -# FRANKENPHP_EXT_DIR="/usr/lib/frankenphp/modules" -# FRANKENPHP_INI_DIR="/etc/frankenphp/php.d" -# if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then -# echo "Uninstalling for FrankenPHP..." 
-# -# if [ -f "$FRANKENPHP_EXT_DIR/aikido-%{version}.so" ]; then -# echo "Uninstalling Aikido extension from $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." -# rm -f $FRANKENPHP_EXT_DIR/aikido-%{version}.so -# fi -# -# if [ -f "$FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini" ]; then -# echo "Uninstalling Aikido mod from $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." -# rm -f $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini -# fi -# fi +if [ -d "$FRANKENPHP_EXT_DIR" ] || [ -d "$FRANKENPHP_INI_DIR" ]; then + echo "Uninstalling for FrankenPHP..." + + if [ -f "$FRANKENPHP_EXT_DIR/aikido-%{version}.so" ]; then + echo "Uninstalling Aikido extension from $FRANKENPHP_EXT_DIR/aikido-%{version}.so..." + rm -f $FRANKENPHP_EXT_DIR/aikido-%{version}.so + fi + + if [ -f "$FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini" ]; then + echo "Uninstalling Aikido mod from $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini..." + rm -f $FRANKENPHP_INI_DIR/zz-aikido-%{version}.ini + fi +fi # Remove the Aikido logs folder rm -rf /var/log/aikido-%{version} diff --git a/tools/mock_aikido_core.py b/tools/mock_aikido_core.py index 2f6abb41b..cd33fec04 100755 --- a/tools/mock_aikido_core.py +++ b/tools/mock_aikido_core.py @@ -143,7 +143,7 @@ def mock_get_token(): if __name__ == '__main__': if len(sys.argv) < 2 or len(sys.argv) > 3: - print("Usage: python mock_server.py [config_file]") + print("Usage: python mock_aikido_core.py [config_file]") sys.exit(1) port = int(sys.argv[1]) diff --git a/tools/run_server_tests.py b/tools/run_server_tests.py index 373608ad7..16bead5c8 100755 --- a/tools/run_server_tests.py +++ b/tools/run_server_tests.py @@ -63,8 +63,6 @@ failed_tests = [] lock = threading.Lock() -max_concurrent_tests = 69 -test_semaphore = threading.Semaphore(max_concurrent_tests) def is_port_in_active_use(port): with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: @@ -95,14 +93,10 @@ def print_test_results(s, tests): for t in tests: print(f"\t- {t}") -def handle_test_scenario_with_semaphore(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug): - test_semaphore.acquire() - try: - handle_test_scenario(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug) - finally: - test_semaphore.release() - def handle_test_scenario(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug): + _handle_test_scenario_impl(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug) + +def _handle_test_scenario_impl(data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug): test_name = data["test_name"] mock_port = data["mock_port"] server_port = data["server_port"] @@ -112,7 +106,7 @@ def handle_test_scenario(data, root_tests_dir, test_lib_dir, server, benchmark, try: print(f"Running {test_name}...") print(f"Starting mock server on port {mock_port} with start_config.json for {test_name}...") - mock_aikido_core = subprocess.Popen(["python3", "-u", "mock_aikido_core.py", str(mock_port), data["config_path"]]) + mock_aikido_core = subprocess.Popen(["python3", "-u", "mock_aikido_core.py", str(mock_port), data["config_path"]], cwd=os.path.dirname(os.path.abspath(__file__))) time.sleep(5) print(f"Starting {server} server on port {server_port} for {test_name}...") @@ -205,14 +199,12 @@ def main(root_tests_dir, test_lib_dir, test_dirs, server="php-built-in", benchma pre_tests() threads = [] - target_func = handle_test_scenario_with_semaphore if server in ["frankenphp-classic", "frankenphp-worker"] else handle_test_scenario for test_data in tests_data: args = (test_data, 
root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug) - thread = threading.Thread(target=target_func, args=args) + thread = threading.Thread(target=handle_test_scenario, args=args) threads.append(thread) thread.start() time.sleep(10) - for thread in threads: thread.join() diff --git a/tools/server_tests/frankenphp_classic/main.py b/tools/server_tests/frankenphp_classic/main.py index 13e366a4b..7c9c66b11 100644 --- a/tools/server_tests/frankenphp_classic/main.py +++ b/tools/server_tests/frankenphp_classic/main.py @@ -63,9 +63,7 @@ def frankenphp_classic_pre_tests(tests_data): f.write("\n" + test_data["site_block"]) subprocess.Popen( - [frankenphp_bin, 'run', '--config', caddyfile_path], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL + [frankenphp_bin, 'run', '--config', caddyfile_path] ) time.sleep(20) diff --git a/tools/server_tests/frankenphp_worker/main.py b/tools/server_tests/frankenphp_worker/main.py index c7306707a..a59ed8574 100644 --- a/tools/server_tests/frankenphp_worker/main.py +++ b/tools/server_tests/frankenphp_worker/main.py @@ -8,7 +8,7 @@ log_dir = "/var/log/frankenphp" worker_scripts_dir = "/tmp/frankenphp_workers" -num_workers = 1 +num_workers = 2 caddyfile_base_template = """{{ frankenphp {{ @@ -107,7 +107,7 @@ def frankenphp_worker_pre_tests(tests_data): subprocess.run(['rm', '-rf', f'{worker_scripts_dir}/*']) total_workers = len(tests_data) - threads = total_workers * 2 + threads = total_workers * 3 with open(caddyfile_path, 'w') as f: f.write(caddyfile_base_template.format(num_threads=threads, max_threads=threads)) @@ -115,9 +115,7 @@ def frankenphp_worker_pre_tests(tests_data): f.write("\n" + test_data["site_block"]) process = subprocess.Popen( - [frankenphp_bin, 'run', '--config', caddyfile_path], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL + [frankenphp_bin, 'run', '--config', caddyfile_path] ) time.sleep(20) From 538e986a9fce4e8358e77fa68acc8e52a5ed8b7d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 9 Dec 2025 15:06:14 +0000 Subject: [PATCH 041/170] Add URL retrieval to request metadata in OnPostRequest function --- lib/request-processor/handle_request_metadata.go | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index 763ebd041..2e70970fc 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -46,6 +46,7 @@ func OnPostRequest(inst *instance.RequestProcessorInstance) string { User: context.GetUserId(inst), UserAgent: context.GetUserAgent(inst), IP: context.GetIp(inst), + Url: context.GetUrl(inst), RateLimitGroup: context.GetRateLimitGroup(inst), RateLimited: context.IsEndpointRateLimited(inst), QueryParsed: context.GetQueryParsed(inst), From a6dc44b211b22b67549d4a152af8153e0f6f91b3 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 9 Dec 2025 15:42:22 +0000 Subject: [PATCH 042/170] Update PHP version support to include 8.5 across multiple workflows and Dockerfiles, and bump version to v2 for consistency in image tagging. 
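
The ZTS test images now bundle a FrankenPHP build matched to each image's PHP
version (8.5 -> v1.10.1, 8.4 -> v1.9.1, 8.3 -> v1.3.2, 8.2 -> v1.0.0-rc.3),
hence the v1 -> v2 image tag bump. A rough sanity check inside a freshly built
image (illustrative only; assumes both binaries are on PATH) is to compare the
PHP version embedded in FrankenPHP with the image's CLI PHP version:

    # PHP version embedded in the FrankenPHP binary
    frankenphp -v 2>/dev/null | grep -oP 'PHP \K\d+\.\d+'
    # CLI PHP version; the two should normally agree
    php -v | head -n 1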
--- .../workflows/Dockerfile.centos-php-test-zts | 37 +++++++++++++++++ .../workflows/Dockerfile.ubuntu-php-test-zts | 41 +++++++++++++++++++ .../build-centos-php-test-images-zts.yml | 6 +-- .../workflows/build-extension-images-zts.yml | 8 ++-- .../build-ubuntu-php-test-images-zts.yml | 8 ++-- .github/workflows/build.yml | 14 +++---- 6 files changed, 96 insertions(+), 18 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 3e6c95cd1..df4b85c8f 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -146,6 +146,43 @@ RUN mkdir -p /etc/php-fpm.d && \ RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +# Install FrankenPHP binary based on PHP version +# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.0.0-rc.3 +RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ + FRANKENPHP_VERSION="1.10.1" \ + && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-0.0.0-1.x86_64.rpm" \ + && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ + && yum install -y /tmp/frankenphp.rpm \ + && rm -f /tmp/frankenphp.rpm; \ + elif [ "${PHP_VERSION}" = "8.4" ]; then \ + FRANKENPHP_VERSION="1.9.1" \ + && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-0.0.0-1.x86_64.rpm" \ + && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ + && yum install -y /tmp/frankenphp.rpm \ + && rm -f /tmp/frankenphp.rpm; \ + elif [ "${PHP_VERSION}" = "8.3" ]; then \ + FRANKENPHP_VERSION="1.3.2" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ + && chmod +x /usr/local/bin/frankenphp \ + && mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ + elif [ "${PHP_VERSION}" = "8.2" ]; then \ + FRANKENPHP_VERSION="1.0.0-rc.3" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ + && chmod +x /usr/local/bin/frankenphp \ + && mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ + fi + +# Create FrankenPHP folder structure (for binary installations, RPM creates these automatically) +RUN mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules + # Python deps used by test harness RUN python3 -m pip install --no-cache-dir --upgrade pip && \ python3 -m pip install --no-cache-dir flask requests psutil diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index d223af21a..c143d3654 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -138,6 +138,47 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +# Install FrankenPHP binary based on PHP version +# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = 
v1.3.2, PHP 8.2 = v1.0.0-rc.3 +RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ + FRANKENPHP_VERSION="1.10.1" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ + && chmod +x /usr/local/bin/frankenphp \ + && mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ + elif [ "${PHP_VERSION}" = "8.4" ]; then \ + FRANKENPHP_VERSION="1.9.1" \ + && FRANKENPHP_DEB_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp_${FRANKENPHP_VERSION}-1_amd64.deb" \ + && curl -fsSL -L -o /tmp/frankenphp.deb "$FRANKENPHP_DEB_URL" \ + && apt-get update \ + && apt-get install -y /tmp/frankenphp.deb \ + && rm -f /tmp/frankenphp.deb \ + && rm -rf /var/lib/apt/lists/*; \ + elif [ "${PHP_VERSION}" = "8.3" ]; then \ + FRANKENPHP_VERSION="1.3.2" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ + && chmod +x /usr/local/bin/frankenphp \ + && mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ + elif [ "${PHP_VERSION}" = "8.2" ]; then \ + FRANKENPHP_VERSION="1.0.0-rc.3" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ + && chmod +x /usr/local/bin/frankenphp \ + && mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ + fi + +# Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) +RUN mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules + # Install web servers and database (without PHP packages) RUN apt-get update && \ apt-get install -y --no-install-recommends \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index b81295a44..02b726736 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -10,14 +10,14 @@ on: env: REGISTRY: ghcr.io IMAGE_NAME: aikidosec/firewall-php-test-centos-zts - VERSION: v1 + VERSION: v2 jobs: build-amd64: runs-on: ubuntu-24.04 strategy: matrix: - php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] fail-fast: false permissions: { contents: read, packages: write } steps: @@ -45,7 +45,7 @@ jobs: runs-on: ubuntu-24.04-arm strategy: matrix: - php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] fail-fast: false permissions: { contents: read, packages: write } steps: diff --git a/.github/workflows/build-extension-images-zts.yml b/.github/workflows/build-extension-images-zts.yml index d5526fafc..52786e633 100644 --- a/.github/workflows/build-extension-images-zts.yml +++ b/.github/workflows/build-extension-images-zts.yml @@ -10,7 +10,7 @@ on: env: REGISTRY: ghcr.io IMAGE_NAME: aikidosec/firewall-php-build-extension-zts - VERSION: v1 + VERSION: v2 jobs: build-amd64: @@ -18,7 +18,7 @@ jobs: strategy: fail-fast: false matrix: - 
php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] permissions: contents: read packages: write @@ -53,7 +53,7 @@ jobs: strategy: fail-fast: false matrix: - php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] permissions: contents: read packages: write @@ -91,7 +91,7 @@ jobs: strategy: fail-fast: false matrix: - php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] permissions: contents: read packages: write diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index c2dbd1fa6..c8eda9587 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -10,13 +10,13 @@ on: env: REGISTRY: ghcr.io IMAGE_NAME: aikidosec/firewall-php-test-ubuntu-zts - VERSION: v1 + VERSION: v2 jobs: build-amd64: runs-on: ubuntu-24.04 strategy: - matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] } + matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] } fail-fast: false permissions: { contents: read, packages: write } steps: @@ -42,7 +42,7 @@ jobs: build-arm64: runs-on: ubuntu-24.04-arm strategy: - matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] } + matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] } fail-fast: false permissions: { contents: read, packages: write } steps: @@ -69,7 +69,7 @@ jobs: runs-on: ubuntu-24.04 needs: [build-amd64, build-arm64] strategy: - matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] } + matrix: { php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] } fail-fast: false permissions: { contents: read, packages: write } steps: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a10cd8fc9..eb2f96cd4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -138,7 +138,7 @@ jobs: container: ghcr.io/aikidosec/firewall-php-build-extension-zts:${{ matrix.php_version }}-v1 strategy: matrix: - php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] arch: [ '', '-arm' ] fail-fast: false @@ -371,7 +371,7 @@ jobs: name: CentOS NTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: - image: ghcr.io/aikidosec/firewall-php-test-centos-nts:${{ matrix.php_version }}-v1 + image: ghcr.io/aikidosec/firewall-php-test-centos-nts:${{ matrix.php_version }}-v2 options: --privileged needs: [ build_rpm ] strategy: @@ -457,7 +457,7 @@ jobs: name: Ubuntu NTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: - image: ghcr.io/aikidosec/firewall-php-test-ubuntu-nts:${{ matrix.php_version }}-v1 + image: ghcr.io/aikidosec/firewall-php-test-ubuntu-nts:${{ matrix.php_version }}-v2 options: --privileged needs: [ build_deb ] strategy: @@ -522,13 +522,13 @@ jobs: name: Ubuntu ZTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: 
ubuntu-24.04${{ matrix.arch }} container: - image: ghcr.io/aikidosec/firewall-php-test-ubuntu-zts:${{ matrix.php_version }}-v1 + image: ghcr.io/aikidosec/firewall-php-test-ubuntu-zts:${{ matrix.php_version }}-v2 options: --privileged needs: [ build_deb ] strategy: matrix: arch: ['', '-arm'] - php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] server: ['nginx-php-fpm', 'php-built-in'] fail-fast: false steps: @@ -590,12 +590,12 @@ jobs: name: CentOS ZTS php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: - image: ghcr.io/aikidosec/firewall-php-test-centos-zts:${{ matrix.php_version }}-v1 + image: ghcr.io/aikidosec/firewall-php-test-centos-zts:${{ matrix.php_version }}-v2 options: --privileged needs: [ build_rpm ] strategy: matrix: - php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] server: ['nginx-php-fpm', 'php-built-in'] arch: ['', '-arm'] fail-fast: false From 65a7a997bc0220173b3346bee9837c702ba6c24d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 9 Dec 2025 15:50:06 +0000 Subject: [PATCH 043/170] Correct version --- lib/php-extension/Action.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index 115ed626c..3fa8b2d39 100644 --- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -5,7 +5,7 @@ ACTION_STATUS Action::executeThrow(json &event) { std::string _message = event["message"].get(); zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); CallPhpFunctionWithOneParam("http_response_code", _code); - zend_throw_exception(zend_exception_get_default(), _message.c_str(), _code); + zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); return BLOCK; } From 5730f0bf0e8301cd8c37cad31e97121857b8f7c3 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 9 Dec 2025 15:59:16 +0000 Subject: [PATCH 044/170] Update PHP version matrix to include 8.5 in CentOS PHP test image workflow --- .github/workflows/build-centos-php-test-images-zts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 02b726736..a4bef6f12 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -74,7 +74,7 @@ jobs: needs: [build-amd64, build-arm64] strategy: matrix: - php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4'] + php_version: ['7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] fail-fast: false permissions: { contents: read, packages: write } steps: From d62858fc718d92fe2a47da8bdfa1de580cf99e67 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 9 Dec 2025 16:08:10 +0000 Subject: [PATCH 045/170] Update Docker container versions for PHP extension builds to v2 in the GitHub Actions workflow. 
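
The extension build jobs now run inside the v2 images produced by the
build-extension-images-nts/zts workflows. To reproduce a single matrix entry
locally, pulling the matching container is usually enough (illustrative; tags
follow the <php_version>-v2 pattern and ghcr.io may require authentication):

    docker pull ghcr.io/aikidosec/firewall-php-build-extension-zts:8.4-v2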
--- .github/workflows/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index eb2f96cd4..6faa37370 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -81,7 +81,7 @@ jobs: build_php_extension_nts: name: Build php ${{ matrix.php_version }} extension NTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} - container: ghcr.io/aikidosec/firewall-php-build-extension-nts:${{ matrix.php_version }}-v1 + container: ghcr.io/aikidosec/firewall-php-build-extension-nts:${{ matrix.php_version }}-v2 strategy: matrix: php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] @@ -135,7 +135,7 @@ jobs: build_php_extension_zts: name: Build php ${{ matrix.php_version }} extension ZTS ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} - container: ghcr.io/aikidosec/firewall-php-build-extension-zts:${{ matrix.php_version }}-v1 + container: ghcr.io/aikidosec/firewall-php-build-extension-zts:${{ matrix.php_version }}-v2 strategy: matrix: php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] From 0548a778f0589b2cffd74ffe2c7751e189fac4a5 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 9 Dec 2025 16:15:10 +0000 Subject: [PATCH 046/170] Update PHP version matrix formatting and bump version to v2 in the build workflow for consistency. --- .github/workflows/build-extension-images-nts.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-extension-images-nts.yml b/.github/workflows/build-extension-images-nts.yml index 7ff184307..40d855570 100644 --- a/.github/workflows/build-extension-images-nts.yml +++ b/.github/workflows/build-extension-images-nts.yml @@ -10,7 +10,7 @@ on: env: REGISTRY: ghcr.io IMAGE_NAME: aikidosec/firewall-php-build-extension-nts - VERSION: v1 + VERSION: v2 jobs: build-amd64: @@ -18,7 +18,7 @@ jobs: strategy: fail-fast: false matrix: - php_version: ['7.2','7.3','7.4','8.0','8.1','8.2','8.3','8.4','8.5'] + php_version: ['7.2', '7.3', '7.4', '8.0', '8.1', '8.2', '8.3', '8.4', '8.5'] permissions: contents: read packages: write From cbb015aff33bb1b4591e85e738364947332477ea Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 11:42:30 +0000 Subject: [PATCH 047/170] Refactor request processor instance management to include thread ID handling. Update related functions and tests to ensure thread safety and consistency in request processing. 
Fixed cli tests to accept TID in log messages --- .../context/context_for_unit_tests.go | 9 +++++-- .../context/request_context.go | 15 +++-------- .../handle_request_metadata.go | 3 +++ lib/request-processor/instance/manager.go | 2 +- lib/request-processor/instance/wrapper.go | 3 ++- lib/request-processor/utils/utils_test.go | 2 +- .../test_register_param_matcher.phpt | 6 ++--- .../test_register_param_matcher_invalid.phpt | 16 ++++++------ .../aikido_ops/test_set_rate_limit_group.phpt | 4 +-- .../cli/aikido_ops/test_set_token_works.phpt | 4 +-- tests/cli/outgoing_request/test_curl.phpt | 26 +++++++++---------- .../cli/outgoing_request/test_curl_share.phpt | 26 +++++++++---------- ...st_outgoing_request_file_get_contents.phpt | 6 ++--- .../shell_execution/test_shell_execution.phpt | 20 +++++++------- 14 files changed, 72 insertions(+), 70 deletions(-) diff --git a/lib/request-processor/context/context_for_unit_tests.go b/lib/request-processor/context/context_for_unit_tests.go index e64a054b6..1d689e125 100644 --- a/lib/request-processor/context/context_for_unit_tests.go +++ b/lib/request-processor/context/context_for_unit_tests.go @@ -1,6 +1,8 @@ package context // #include "../../API.h" +// #include +// static unsigned long get_thread_id() { return (unsigned long)pthread_self(); } import "C" import ( "encoding/json" @@ -43,15 +45,18 @@ func UnitTestsCallback(inst *instance.RequestProcessorInstance, context_id int) return "" } +func getThreadID() uint64 { + return uint64(C.get_thread_id()) +} + func LoadForUnitTests(context map[string]string) *instance.RequestProcessorInstance { tid := getThreadID() - mockInst := instance.NewRequestProcessorInstance(false) + mockInst := instance.NewRequestProcessorInstance(tid, false) if TestServer != nil { mockInst.SetCurrentServer(TestServer) mockInst.SetCurrentToken(TestServer.AikidoConfig.Token) } - mockInst.SetThreadID(tid) ctx := &RequestContextData{ inst: mockInst, diff --git a/lib/request-processor/context/request_context.go b/lib/request-processor/context/request_context.go index f2a70968f..204284b9e 100644 --- a/lib/request-processor/context/request_context.go +++ b/lib/request-processor/context/request_context.go @@ -1,8 +1,6 @@ package context // #include "../../API.h" -// #include -// static unsigned long get_thread_id() { return (unsigned long)pthread_self(); } import "C" import ( . 
"main/aikido_types" @@ -55,13 +53,13 @@ func GetServerPID() int32 { } func Init(instPtr unsafe.Pointer, callback CallbackFunction) bool { - tid := getThreadID() - inst := instance.GetInstance(instPtr) - if inst != nil { - inst.SetThreadID(tid) + if inst == nil { + return false } + tid := inst.GetThreadID() + globals.ContextInstances.Store(tid, instPtr) ctx := &RequestContextData{ @@ -82,11 +80,6 @@ func (ctx *RequestContextData) GetInstance() *instance.RequestProcessorInstance return ctx.inst } -// getThreadID is only called once during Init to bootstrap the threadID cache in inst -func getThreadID() uint64 { - return uint64(C.get_thread_id()) -} - func Clear(inst *instance.RequestProcessorInstance) bool { ctx := GetContext(inst) *ctx = RequestContextData{ diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index 2e70970fc..e73d9bcc7 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -36,6 +36,9 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { } func OnPostRequest(inst *instance.RequestProcessorInstance) string { + if inst.GetCurrentServer() == nil { + return "" + } params := RequestShutdownParams{ ThreadID: inst.GetThreadID(), Token: inst.GetCurrentToken(), diff --git a/lib/request-processor/instance/manager.go b/lib/request-processor/instance/manager.go index af85e40f1..211b5abfb 100644 --- a/lib/request-processor/instance/manager.go +++ b/lib/request-processor/instance/manager.go @@ -25,7 +25,7 @@ func CreateInstance(threadID uint64, isZTS bool) unsafe.Pointer { return unsafe.Pointer(existingInstance) } - instance := NewRequestProcessorInstance(isZTS) + instance := NewRequestProcessorInstance(threadID, isZTS) instances[threadID] = instance // Pin to prevent GC while C++ holds the pointer diff --git a/lib/request-processor/instance/wrapper.go b/lib/request-processor/instance/wrapper.go index c92afcc5f..53931c78b 100644 --- a/lib/request-processor/instance/wrapper.go +++ b/lib/request-processor/instance/wrapper.go @@ -21,10 +21,11 @@ type RequestProcessorInstance struct { } // NewRequestProcessorInstance creates an instance. Pass isZTS=true for FrankenPHP. 
-func NewRequestProcessorInstance(isZTS bool) *RequestProcessorInstance { +func NewRequestProcessorInstance(threadID uint64, isZTS bool) *RequestProcessorInstance { return &RequestProcessorInstance{ CurrentToken: "", CurrentServer: nil, + threadID: threadID, isZTS: isZTS, } } diff --git a/lib/request-processor/utils/utils_test.go b/lib/request-processor/utils/utils_test.go index 29b2414d6..e5b14143c 100644 --- a/lib/request-processor/utils/utils_test.go +++ b/lib/request-processor/utils/utils_test.go @@ -381,7 +381,7 @@ func TestBuildRouteFromURL_WithParamMatchers(t *testing.T) { "slug": mustCompileCustomPattern("aikido-{alpha}-{digits}-{alpha}"), } - testInst := instance.NewRequestProcessorInstance(false) + testInst := instance.NewRequestProcessorInstance(0, false) testInst.SetCurrentServer(server) tests := []struct { diff --git a/tests/cli/aikido_ops/test_register_param_matcher.phpt b/tests/cli/aikido_ops/test_register_param_matcher.phpt index b726fbed9..c13b611be 100644 --- a/tests/cli/aikido_ops/test_register_param_matcher.phpt +++ b/tests/cli/aikido_ops/test_register_param_matcher.phpt @@ -12,6 +12,6 @@ $result = \aikido\register_param_matcher("param_name", "{digits}-{alpha}"); ?> ---EXPECT-- -[AIKIDO][INFO] Token changed to "AIK_RUNTIME_***UMMY" -[AIKIDO][INFO] Registered param matcher param_name -> {digits}-{alpha} \ No newline at end of file +--EXPECTREGEX-- +\[AIKIDO\]\[INFO\]\[tid:\d+\] Token changed to "AIK_RUNTIME_\*\*\*UMMY" +\[AIKIDO\]\[INFO\]\[tid:\d+\] Registered param matcher param_name -> \{digits\}-\{alpha\} \ No newline at end of file diff --git a/tests/cli/aikido_ops/test_register_param_matcher_invalid.phpt b/tests/cli/aikido_ops/test_register_param_matcher_invalid.phpt index e8d2e280b..05d1e55c6 100644 --- a/tests/cli/aikido_ops/test_register_param_matcher_invalid.phpt +++ b/tests/cli/aikido_ops/test_register_param_matcher_invalid.phpt @@ -21,11 +21,11 @@ foreach ($invalidPatterns as $name => $pattern) { ?> ---EXPECT-- -[AIKIDO][INFO] Token changed to "AIK_RUNTIME_***UMMY" -Error compiling param matcher no_braces -> regex "digits-alpha": pattern should contain { or } -bool(false) -Error compiling param matcher unclosed_brace -> regex "{digits": pattern should contain { or } -bool(false) -Error compiling param matcher with_slash -> regex "aikido/{digits}": pattern should not contain slashes -bool(false) +--EXPECTREGEX-- +\[AIKIDO\]\[INFO\]\[tid:\d+\] Token changed to "AIK_RUNTIME_\*\*\*UMMY" +Error compiling param matcher no_braces -> regex "digits-alpha": pattern should contain \{ or \} +bool\(false\) +Error compiling param matcher unclosed_brace -> regex "\{digits": pattern should contain \{ or \} +bool\(false\) +Error compiling param matcher with_slash -> regex "aikido\/\{digits}": pattern should not contain slashes +bool\(false\) diff --git a/tests/cli/aikido_ops/test_set_rate_limit_group.phpt b/tests/cli/aikido_ops/test_set_rate_limit_group.phpt index 57137ba78..9158fb7ef 100644 --- a/tests/cli/aikido_ops/test_set_rate_limit_group.phpt +++ b/tests/cli/aikido_ops/test_set_rate_limit_group.phpt @@ -11,5 +11,5 @@ AIKIDO_LOG_LEVEL=INFO ?> ---EXPECT-- -[AIKIDO][INFO] Got rate limit group: my_user_group +--EXPECTREGEX-- +\[AIKIDO\]\[INFO\]\[tid:\d+\] Got rate limit group: my_user_group diff --git a/tests/cli/aikido_ops/test_set_token_works.phpt b/tests/cli/aikido_ops/test_set_token_works.phpt index d0bca41d2..31e72c7ce 100644 --- a/tests/cli/aikido_ops/test_set_token_works.phpt +++ b/tests/cli/aikido_ops/test_set_token_works.phpt @@ -11,5 +11,5 @@ AIKIDO_LOG_LEVEL=INFO 
?> ---EXPECT-- -[AIKIDO][INFO] Token changed to "AIK_RUNTIME_***here" +--EXPECTREGEX-- +\[AIKIDO\]\[INFO\]\[tid:\d+\] Token changed to "AIK_RUNTIME_\*\*\*here" diff --git a/tests/cli/outgoing_request/test_curl.phpt b/tests/cli/outgoing_request/test_curl.phpt index fed21aa57..b9ef1390d 100644 --- a/tests/cli/outgoing_request/test_curl.phpt +++ b/tests/cli/outgoing_request/test_curl.phpt @@ -62,16 +62,16 @@ curl_close($ch6); ?> ---EXPECT-- -[AIKIDO][INFO] [BEFORE] Got domain: example.com -[AIKIDO][INFO] [AFTER] Got domain: example.com port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: httpbin.org -[AIKIDO][INFO] [AFTER] Got domain: httpbin.org port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: facebook.com -[AIKIDO][INFO] [AFTER] Got domain: facebook.com port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: facebook.com -[AIKIDO][INFO] [AFTER] Got domain: facebook.com port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: www.aikido.dev -[AIKIDO][INFO] [AFTER] Got domain: www.aikido.dev port: 80 -[AIKIDO][INFO] [BEFORE] Got domain: some-invalid-domain.com -[AIKIDO][INFO] [AFTER] Got domain: some-invalid-domain.com port: 4113 \ No newline at end of file +--EXPECTREGEX-- +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: example.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: example.com port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: httpbin.org +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: httpbin.org port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: facebook.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: facebook.com port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: facebook.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: facebook.com port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: www.aikido.dev +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: www.aikido.dev port: 80 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: some-invalid-domain.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: some-invalid-domain.com port: 4113 \ No newline at end of file diff --git a/tests/cli/outgoing_request/test_curl_share.phpt b/tests/cli/outgoing_request/test_curl_share.phpt index e5bc699e4..4ce629aa4 100644 --- a/tests/cli/outgoing_request/test_curl_share.phpt +++ b/tests/cli/outgoing_request/test_curl_share.phpt @@ -66,17 +66,17 @@ curl_exec($ch6); ?> ---EXPECT-- -[AIKIDO][INFO] [BEFORE] Got domain: example.com -[AIKIDO][INFO] [AFTER] Got domain: example.com port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: httpbin.org -[AIKIDO][INFO] [AFTER] Got domain: httpbin.org port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: facebook.com -[AIKIDO][INFO] [AFTER] Got domain: facebook.com port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: facebook.com -[AIKIDO][INFO] [AFTER] Got domain: facebook.com port: 443 -[AIKIDO][INFO] [BEFORE] Got domain: www.aikido.dev -[AIKIDO][INFO] [AFTER] Got domain: www.aikido.dev port: 80 -[AIKIDO][INFO] [BEFORE] Got domain: some-invalid-domain.com -[AIKIDO][INFO] [AFTER] Got domain: some-invalid-domain.com port: 4113 +--EXPECTREGEX-- +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: example.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: example.com port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: httpbin.org +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: httpbin.org port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: facebook.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: facebook.com port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: 
facebook.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: facebook.com port: 443 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: www.aikido.dev +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: www.aikido.dev port: 80 +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: some-invalid-domain.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: some-invalid-domain.com port: 4113 diff --git a/tests/cli/outgoing_request/test_outgoing_request_file_get_contents.phpt b/tests/cli/outgoing_request/test_outgoing_request_file_get_contents.phpt index 4025de203..e37d75bd1 100644 --- a/tests/cli/outgoing_request/test_outgoing_request_file_get_contents.phpt +++ b/tests/cli/outgoing_request/test_outgoing_request_file_get_contents.phpt @@ -12,6 +12,6 @@ file_get_contents("http://www.example.com"); ?> ---EXPECT-- -[AIKIDO][INFO] [BEFORE] Got domain: www.example.com -[AIKIDO][INFO] [AFTER] Got domain: www.example.com port: 80 \ No newline at end of file +--EXPECTREGEX-- +.*\[AIKIDO\]\[INFO\]\[tid:\d+\] \[BEFORE\] Got domain: www.example.com +\[AIKIDO\]\[INFO\]\[tid:\d+\] \[AFTER\] Got domain: www.example.com port: 80 \ No newline at end of file diff --git a/tests/cli/shell_execution/test_shell_execution.phpt b/tests/cli/shell_execution/test_shell_execution.phpt index 717d2159c..e45848eae 100644 --- a/tests/cli/shell_execution/test_shell_execution.phpt +++ b/tests/cli/shell_execution/test_shell_execution.phpt @@ -55,24 +55,24 @@ if (is_resource($process)) { echo "\n"; ?> ---EXPECTF-- -[AIKIDO][INFO] Got shell command: echo "Hello from exec!" +--EXPECTREGEX-- +\[AIKIDO\]\[INFO\]\[tid:\d+\] Got shell command: echo "Hello from exec!" Array -( - [0] => Hello from exec! -) +\( + \[0\] => Hello from exec! +\) -[AIKIDO][INFO] Got shell command: echo "Hello from shell_exec!" +\[AIKIDO\]\[INFO\]\[tid:\d+\] Got shell command: echo "Hello from shell_exec!" Hello from shell_exec! -[AIKIDO][INFO] Got shell command: echo "Hello from system!" +\[AIKIDO\]\[INFO\]\[tid:\d+\] Got shell command: echo "Hello from system!" Hello from system! -[AIKIDO][INFO] Got shell command: echo "Hello from passthru!" +\[AIKIDO\]\[INFO\]\[tid:\d+\] Got shell command: echo "Hello from passthru!" Hello from passthru! -[AIKIDO][INFO] Got shell command: echo "Hello from popen!" +\[AIKIDO\]\[INFO\]\[tid:\d+\] Got shell command: echo "Hello from popen!" Hello from popen! -[AIKIDO][INFO] Got shell command: echo "Hello from proc_open!" +\[AIKIDO\]\[INFO\]\[tid:\d+\] Got shell command: echo "Hello from proc_open!" Hello from proc_open! From bb8efbe504209249f53feefef9444d6f758a4326 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 14:22:12 +0200 Subject: [PATCH 048/170] Update version to v2 in CentOS and Ubuntu PHP test image workflows for consistency. 
--- .github/workflows/build-centos-php-test-images-nts.yml | 2 +- .github/workflows/build-ubuntu-php-test-images-nts.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-centos-php-test-images-nts.yml b/.github/workflows/build-centos-php-test-images-nts.yml index e140bfd7d..6546ac349 100644 --- a/.github/workflows/build-centos-php-test-images-nts.yml +++ b/.github/workflows/build-centos-php-test-images-nts.yml @@ -10,7 +10,7 @@ on: env: REGISTRY: ghcr.io IMAGE_NAME: aikidosec/firewall-php-test-centos-nts - VERSION: v1 + VERSION: v2 jobs: build-amd64: diff --git a/.github/workflows/build-ubuntu-php-test-images-nts.yml b/.github/workflows/build-ubuntu-php-test-images-nts.yml index 32e0e1f28..c218942a4 100644 --- a/.github/workflows/build-ubuntu-php-test-images-nts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-nts.yml @@ -10,7 +10,7 @@ on: env: REGISTRY: ghcr.io IMAGE_NAME: aikidosec/firewall-php-test-ubuntu-nts - VERSION: v1 + VERSION: v2 jobs: build-amd64: From b7880ed7103ab3dd2b3a9bcc6ca2ebf89322285d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 12:42:59 +0000 Subject: [PATCH 049/170] Update run-tests.php (from PHP 8.5) --- lib/php-extension/run-tests.php | 2974 +++++++++++++++++-------------- 1 file changed, 1682 insertions(+), 1292 deletions(-) diff --git a/lib/php-extension/run-tests.php b/lib/php-extension/run-tests.php index b185844b5..d0befa373 100644 --- a/lib/php-extension/run-tests.php +++ b/lib/php-extension/run-tests.php @@ -7,7 +7,7 @@ | This source file is subject to version 3.01 of the PHP license, | | that is bundled with this package in the file LICENSE, and is | | available through the world-wide-web at the following url: | - | https://php.net/license/3_01.txt | + | https://www.php.net/license/3_01.txt | | If you did not receive a copy of the PHP license and are unable to | | obtain it through the world-wide-web, please send a note to | | license@php.net so we can mail you a copy immediately. | @@ -23,12 +23,10 @@ +----------------------------------------------------------------------+ */ -/* $Id: 32de2a11d1b29ffcf67a7e4dfab6d2190160aaf7 $ */ - /* Let there be no top-level code beyond this point: * Only functions and classes, thanks! * - * Minimum required PHP version: 7.1.0 + * Minimum required PHP version: 8.0.0 */ function show_usage(): void @@ -51,7 +49,7 @@ function show_usage(): void -w Write a list of all failed tests to . - -a Same as -w but append rather then truncating . + -a Same as -w but append rather than truncating . -W Write a list of all tests and their result status to . @@ -77,9 +75,11 @@ function show_usage(): void -s Write output to . - -x Sets 'SKIP_SLOW_TESTS' environmental variable. + -x Sets 'SKIP_SLOW_TESTS' environment variable. + + --online Prevents setting the 'SKIP_ONLINE_TESTS' environment variable. - --offline Sets 'SKIP_ONLINE_TESTS' environmental variable. + --offline Sets 'SKIP_ONLINE_TESTS' environment variable (default). --verbose -v Verbose mode. @@ -90,7 +90,7 @@ function show_usage(): void --temp-source --temp-target [--temp-urlbase ] Write temporary files to by replacing from the filenames to generate with . In general you want to make - the path to your source files and some patch in + the path to your source files and some path in your web page hierarchy with pointing to . --keep-[all|php|skip|clean] @@ -121,6 +121,14 @@ function show_usage(): void --color --no-color Do/Don't colorize the result type in the test result. 
+ --progress + --no-progress Do/Don't show the current progress. + + --repeat [n] + Run the tests multiple times in the same process and check the + output of the last execution (CLI SAPI only). + + --bless Bless failed tests using scripts/dev/bless_tests.php. HELP; } @@ -138,20 +146,24 @@ function main(): void * looks like it doesn't belong, it probably doesn't; cull at will. */ global $DETAILED, $PHP_FAILED_TESTS, $SHOW_ONLY_GROUPS, $argc, $argv, $cfg, - $cfgfiles, $cfgtypes, $conf_passed, $end_time, $environment, + $end_time, $environment, $exts_skipped, $exts_tested, $exts_to_test, $failed_tests_file, - $ignored_by_ext, $ini_overwrites, $is_switch, $colorize, - $just_save_results, $log_format, $matches, $no_clean, $no_file_cache, - $optionals, $output_file, $pass_option_n, $pass_options, - $pattern_match, $php, $php_cgi, $phpdbg, $preload, $redir_tests, - $repeat, $result_tests_file, $slow_min_ms, $start_time, $switch, - $temp_source, $temp_target, $test_cnt, $test_dirs, - $test_files, $test_idx, $test_list, $test_results, $testfile, - $user_tests, $valgrind, $sum_results, $shuffle, $file_cache; + $ignored_by_ext, $ini_overwrites, $colorize, + $log_format, $no_clean, $no_file_cache, + $pass_options, $php, $php_cgi, $preload, + $result_tests_file, $slow_min_ms, $start_time, + $temp_source, $temp_target, $test_cnt, + $test_files, $test_idx, $test_results, $testfile, + $valgrind, $sum_results, $shuffle, $file_cache, $num_repeats, + $show_progress; // Parallel testing global $workers, $workerID; global $context_line_count; + // Temporary for the duration of refactoring + /** @var JUnit $junit */ + global $junit; + define('IS_WINDOWS', substr(PHP_OS, 0, 3) == "WIN"); $workerID = 0; @@ -212,81 +224,18 @@ function main(): void // fail to reattach to the OpCache because it will be using the // wrong path. die("TEMP environment is NOT set"); - } else { - if (count($environment) == 1) { - // Not having other environment variables, only having TEMP, is - // probably ok, but strange and may make a difference in the - // test pass rate, so warn the user. - echo "WARNING: Only 1 environment variable will be available to tests(TEMP environment variable)" . PHP_EOL; - } - } - } - - if (IS_WINDOWS && empty($environment["SystemRoot"])) { - $environment["SystemRoot"] = getenv("SystemRoot"); - } - - $php = null; - $php_cgi = null; - $phpdbg = null; - - if (getenv('TEST_PHP_EXECUTABLE')) { - $php = getenv('TEST_PHP_EXECUTABLE'); - - if ($php == 'auto') { - $php = TEST_PHP_SRCDIR . '/sapi/cli/php'; - putenv("TEST_PHP_EXECUTABLE=$php"); - - if (!getenv('TEST_PHP_CGI_EXECUTABLE')) { - $php_cgi = TEST_PHP_SRCDIR . '/sapi/cgi/php-cgi'; - - if (file_exists($php_cgi)) { - putenv("TEST_PHP_CGI_EXECUTABLE=$php_cgi"); - } else { - $php_cgi = null; - } - } - } - $environment['TEST_PHP_EXECUTABLE'] = $php; - } - - if (getenv('TEST_PHP_CGI_EXECUTABLE')) { - $php_cgi = getenv('TEST_PHP_CGI_EXECUTABLE'); - - if ($php_cgi == 'auto') { - $php_cgi = TEST_PHP_SRCDIR . '/sapi/cgi/php-cgi'; - putenv("TEST_PHP_CGI_EXECUTABLE=$php_cgi"); } - $environment['TEST_PHP_CGI_EXECUTABLE'] = $php_cgi; - } - - if (!getenv('TEST_PHPDBG_EXECUTABLE')) { - if (IS_WINDOWS && file_exists(dirname($php) . "/phpdbg.exe")) { - $phpdbg = realpath(dirname($php) . "/phpdbg.exe"); - } elseif (file_exists(dirname($php) . "/../../sapi/phpdbg/phpdbg")) { - $phpdbg = realpath(dirname($php) . "/../../sapi/phpdbg/phpdbg"); - } elseif (file_exists("./sapi/phpdbg/phpdbg")) { - $phpdbg = realpath("./sapi/phpdbg/phpdbg"); - } elseif (file_exists(dirname($php) . 
"/phpdbg")) { - $phpdbg = realpath(dirname($php) . "/phpdbg"); - } else { - $phpdbg = null; - } - if ($phpdbg) { - putenv("TEST_PHPDBG_EXECUTABLE=$phpdbg"); + if (count($environment) == 1) { + // Not having other environment variables, only having TEMP, is + // probably ok, but strange and may make a difference in the + // test pass rate, so warn the user. + echo "WARNING: Only 1 environment variable will be available to tests(TEMP environment variable)" , PHP_EOL; } } - if (getenv('TEST_PHPDBG_EXECUTABLE')) { - $phpdbg = getenv('TEST_PHPDBG_EXECUTABLE'); - - if ($phpdbg == 'auto') { - $phpdbg = TEST_PHP_SRCDIR . '/sapi/phpdbg/phpdbg'; - putenv("TEST_PHPDBG_EXECUTABLE=$phpdbg"); - } - - $environment['TEST_PHPDBG_EXECUTABLE'] = $phpdbg; + if (IS_WINDOWS && empty($environment["SystemRoot"])) { + $environment["SystemRoot"] = getenv("SystemRoot"); } if (getenv('TEST_PHP_LOG_FORMAT')) { @@ -302,7 +251,7 @@ function main(): void $DETAILED = 0; } - junit_init(); + $junit = new JUnit($environment, $workerID); if (getenv('SHOW_ONLY_GROUPS')) { $SHOW_ONLY_GROUPS = explode(",", getenv('SHOW_ONLY_GROUPS')); @@ -311,10 +260,9 @@ function main(): void } // Check whether user test dirs are requested. + $user_tests = []; if (getenv('TEST_PHP_USER')) { $user_tests = explode(',', getenv('TEST_PHP_USER')); - } else { - $user_tests = []; } $exts_to_test = []; @@ -324,12 +272,12 @@ function main(): void 'disable_functions=', 'output_buffering=Off', 'error_reporting=' . E_ALL, + 'fatal_error_backtraces=Off', 'display_errors=1', 'display_startup_errors=1', 'log_errors=0', 'html_errors=0', 'track_errors=0', - 'report_memleaks=1', 'report_zend_debug=0', 'docref_root=', 'docref_ext=.html', @@ -341,7 +289,6 @@ function main(): void 'precision=14', 'serialize_precision=-1', 'memory_limit=128M', - 'log_errors_max_len=0', 'opcache.fast_shutdown=0', 'opcache.file_update_protection=0', 'opcache.revalidate_freq=0', @@ -349,6 +296,10 @@ function main(): void 'opcache.jit_hot_func=1', 'opcache.jit_hot_return=1', 'opcache.jit_hot_side_exit=1', + 'opcache.jit_max_root_traces=100000', + 'opcache.jit_max_side_traces=100000', + 'opcache.jit_max_exit_counters=100000', + 'opcache.protect_memory=1', 'zend.assertions=1', 'zend.exception_ignore_args=0', 'zend.exception_string_param_max_len=15', @@ -357,11 +308,6 @@ function main(): void $no_file_cache = '-d opcache.file_cache= -d opcache.file_cache_only=0'; - define('PHP_QA_EMAIL', 'qa-reports@lists.php.net'); - define('QA_SUBMISSION_PAGE', 'http://qa.php.net/buildtest-process.php'); - define('QA_REPORTS_PAGE', 'http://qa.php.net/reports'); - define('TRAVIS_CI', (bool) getenv('TRAVIS')); - // Determine the tests to be run. $test_files = []; @@ -395,7 +341,7 @@ function main(): void if (function_exists('sapi_windows_vt100_support') && !sapi_windows_vt100_support(STDOUT, true)) { $colorize = false; } - if (array_key_exists('NO_COLOR', $_ENV)) { + if (array_key_exists('NO_COLOR', $environment)) { $colorize = false; } $selected_tests = false; @@ -403,8 +349,13 @@ function main(): void $preload = false; $file_cache = null; $shuffle = false; + $bless = false; $workers = null; $context_line_count = 3; + $num_repeats = 1; + $show_progress = true; + $ignored_by_ext = []; + $online = null; $cfgtypes = ['show', 'keep']; $cfgfiles = ['skip', 'php', 'clean', 'out', 'diff', 'exp', 'mem']; @@ -440,15 +391,13 @@ function main(): void $is_switch = true; - if ($repeat) { - foreach ($cfgtypes as $type) { - if (strpos($switch, '--' . 
$type) === 0) { - foreach ($cfgfiles as $file) { - if ($switch == '--' . $type . '-' . $file) { - $cfg[$type][$file] = true; - $is_switch = false; - break; - } + foreach ($cfgtypes as $type) { + if (strpos($switch, '--' . $type) === 0) { + foreach ($cfgfiles as $file) { + if ($switch == '--' . $type . '-' . $file) { + $cfg[$type][$file] = true; + $is_switch = false; + break; } } } @@ -464,7 +413,7 @@ function main(): void switch ($switch) { case 'j': $workers = substr($argv[$i], 2); - if (!preg_match('/^\d+$/', $workers) || $workers == 0) { + if ($workers == 0 || !preg_match('/^\d+$/', $workers)) { error("'$workers' is not a valid number of workers, try e.g. -j16 for 16 workers"); } $workers = intval($workers, 10); @@ -481,10 +430,8 @@ function main(): void $matches = []; if (preg_match('/^#.*\[(.*)\]\:\s+(.*)$/', $test, $matches)) { $redir_tests[] = [$matches[1], $matches[2]]; - } else { - if (strlen($test)) { - $test_files[] = trim($test); - } + } elseif (strlen($test)) { + $test_files[] = trim($test); } } } @@ -511,13 +458,11 @@ function main(): void case 'g': $SHOW_ONLY_GROUPS = explode(",", $argv[++$i]); break; - //case 'h' case '--keep-all': foreach ($cfgfiles as $file) { $cfg['keep'][$file] = true; } break; - //case 'l' case 'm': $valgrind = new RuntestsValgrind($environment); break; @@ -535,6 +480,7 @@ function main(): void break; case '--preload': $preload = true; + $environment['SKIP_PRELOAD'] = 1; break; case '--file-cache-prime': $file_cache = 'prime'; @@ -565,7 +511,6 @@ function main(): void putenv('NO_INTERACTION=1'); $environment['NO_INTERACTION'] = 1; break; - //case 'r' case 's': $output_file = $argv[++$i]; $just_save_results = true; @@ -609,8 +554,11 @@ function main(): void case 'x': $environment['SKIP_SLOW_TESTS'] = 1; break; + case '--online': + $online = true; + break; case '--offline': - $environment['SKIP_ONLINE_TESTS'] = 1; + $online = false; break; case '--shuffle': $shuffle = true; @@ -623,6 +571,7 @@ function main(): void $environment['SKIP_PERF_SENSITIVE'] = 1; if ($switch === '--msan') { $environment['SKIP_MSAN'] = 1; + $environment['MSAN_OPTIONS'] = 'intercept_tls_get_addr=0'; } $lsanSuppressions = __DIR__ . '/.github/lsan-suppressions.txt'; @@ -631,7 +580,13 @@ function main(): void . ':print_suppressions=0'; } break; - //case 'w' + case '--repeat': + $num_repeats = (int) $argv[++$i]; + $environment['SKIP_REPEAT'] = 1; + break; + case '--bless': + $bless = true; + break; case '-': // repeat check with full switch $switch = $argv[$i]; @@ -639,8 +594,14 @@ function main(): void $repeat = true; } break; + case '--progress': + $show_progress = true; + break; + case '--no-progress': + $show_progress = false; + break; case '--version': - echo '$Id: 32de2a11d1b29ffcf67a7e4dfab6d2190160aaf7 $' . "\n"; + echo '$Id$' . "\n"; exit(1); default: @@ -661,44 +622,64 @@ function main(): void if (!$testfile && strpos($argv[$i], '*') !== false && function_exists('glob')) { if (substr($argv[$i], -5) == '.phpt') { $pattern_match = glob($argv[$i]); + } elseif (preg_match("/\*$/", $argv[$i])) { + $pattern_match = glob($argv[$i] . '.phpt'); } else { - if (preg_match("/\*$/", $argv[$i])) { - $pattern_match = glob($argv[$i] . '.phpt'); - } else { - die('Cannot find test file "' . $argv[$i] . '".' . PHP_EOL); - } + die('Cannot find test file "' . $argv[$i] . '".' . 
PHP_EOL); } if (is_array($pattern_match)) { $test_files = array_merge($test_files, $pattern_match); } + } elseif (is_dir($testfile)) { + find_files($testfile); + } elseif (substr($testfile, -5) == '.phpt') { + $test_files[] = $testfile; } else { - if (is_dir($testfile)) { - find_files($testfile); - } else { - if (substr($testfile, -5) == '.phpt') { - $test_files[] = $testfile; - } else { - die('Cannot find test file "' . $argv[$i] . '".' . PHP_EOL); - } - } + die('Cannot find test file "' . $argv[$i] . '".' . PHP_EOL); } } } + if ($online === null && !isset($environment['SKIP_ONLINE_TESTS'])) { + $online = false; + } + if ($online !== null) { + $environment['SKIP_ONLINE_TESTS'] = $online ? '0' : '1'; + } + + if (!defined('STDIN') || !stream_isatty(STDIN) + || !defined('STDOUT') || !stream_isatty(STDOUT) + || !defined('STDERR') || !stream_isatty(STDERR)) { + $environment['SKIP_IO_CAPTURE_TESTS'] = '1'; + } + if ($selected_tests && count($test_files) === 0) { echo "No tests found.\n"; return; } - // Default to PHP_BINARY as executable - if (!isset($environment['TEST_PHP_EXECUTABLE'])) { - $php = PHP_BINARY; - putenv("TEST_PHP_EXECUTABLE=$php"); - $environment['TEST_PHP_EXECUTABLE'] = $php; + if (!$php) { + $php = getenv('TEST_PHP_EXECUTABLE') ?: PHP_BINARY; } - if (strlen($conf_passed)) { + $php_cgi = getenv('TEST_PHP_CGI_EXECUTABLE') ?: get_binary($php, 'php-cgi', 'sapi/cgi/php-cgi'); + $phpdbg = getenv('TEST_PHPDBG_EXECUTABLE') ?: get_binary($php, 'phpdbg', 'sapi/phpdbg/phpdbg'); + + putenv("TEST_PHP_EXECUTABLE=$php"); + $environment['TEST_PHP_EXECUTABLE'] = $php; + putenv("TEST_PHP_EXECUTABLE_ESCAPED=" . escapeshellarg($php)); + $environment['TEST_PHP_EXECUTABLE_ESCAPED'] = escapeshellarg($php); + putenv("TEST_PHP_CGI_EXECUTABLE=$php_cgi"); + $environment['TEST_PHP_CGI_EXECUTABLE'] = $php_cgi; + putenv("TEST_PHP_CGI_EXECUTABLE_ESCAPED=" . escapeshellarg($php_cgi ?? '')); + $environment['TEST_PHP_CGI_EXECUTABLE_ESCAPED'] = escapeshellarg($php_cgi ?? ''); + putenv("TEST_PHPDBG_EXECUTABLE=$phpdbg"); + $environment['TEST_PHPDBG_EXECUTABLE'] = $phpdbg; + putenv("TEST_PHPDBG_EXECUTABLE_ESCAPED=" . escapeshellarg($phpdbg ?? '')); + $environment['TEST_PHPDBG_EXECUTABLE_ESCAPED'] = escapeshellarg($phpdbg ?? ''); + + if ($conf_passed !== null) { if (IS_WINDOWS) { $pass_options .= " -c " . escapeshellarg($conf_passed); } else { @@ -712,19 +693,23 @@ function main(): void // Run selected tests. $test_cnt = count($test_files); - verify_config(); - write_information(); + if ($test_cnt === 1) { + $cfg['show']['diff'] = true; + } + + verify_config($php); + write_information($user_tests, $phpdbg); if ($test_cnt) { putenv('NO_INTERACTION=1'); usort($test_files, "test_sort"); - $start_time = time(); + $start_time = hrtime(true); echo "Running selected tests.\n"; $test_idx = 0; run_all_tests($test_files, $environment); - $end_time = time(); + $end_time = hrtime(true); if ($failed_tests_file) { fclose($failed_tests_file); @@ -744,33 +729,21 @@ function main(): void echo get_summary(false); if ($output_file != '' && $just_save_results) { - save_or_mail_results(); + save_results($output_file, /* prompt_to_save_results: */ false); } } else { // Compile a list of all test files (*.phpt). 
$test_files = []; - $exts_tested = count($exts_to_test); - $exts_skipped = 0; - $ignored_by_ext = 0; + $exts_tested = $exts_to_test; + $exts_skipped = []; sort($exts_to_test); - $test_dirs = []; - $optionals = ['Zend', 'tests', 'ext', 'sapi']; - foreach ($optionals as $dir) { + foreach (['Zend', 'tests', 'ext', 'sapi'] as $dir) { if (is_dir($dir)) { - $test_dirs[] = $dir; + find_files(TEST_PHP_SRCDIR . "/{$dir}", $dir == 'ext'); } } - // Convert extension names to lowercase - foreach ($exts_to_test as $key => $val) { - $exts_to_test[$key] = strtolower($val); - } - - foreach ($test_dirs as $dir) { - find_files(TEST_PHP_SRCDIR . "/{$dir}", $dir == 'ext'); - } - foreach ($user_tests as $dir) { find_files($dir, $dir == 'ext'); } @@ -778,13 +751,14 @@ function main(): void $test_files = array_unique($test_files); usort($test_files, "test_sort"); - $start_time = time(); - show_start($start_time); + $start_timestamp = time(); + $start_time = hrtime(true); + show_start($start_timestamp); $test_cnt = count($test_files); $test_idx = 0; run_all_tests($test_files, $environment); - $end_time = time(); + $end_time = hrtime(true); if ($failed_tests_file) { fclose($failed_tests_file); @@ -803,40 +777,24 @@ function main(): void compute_summary(); - show_end($end_time); + show_end($start_timestamp, $start_time, $end_time); show_summary(); - save_or_mail_results(); + save_results($output_file, /* prompt_to_save_results: */ true); } - junit_save_xml(); + $junit->saveXML(); + if ($bless) { + bless_failed_tests($PHP_FAILED_TESTS['FAILED']); + } if (getenv('REPORT_EXIT_STATUS') !== '0' && getenv('REPORT_EXIT_STATUS') !== 'no' && ($sum_results['FAILED'] || $sum_results['BORKED'] || $sum_results['LEAKED'])) { exit(1); } } -if (!function_exists("hrtime")) { - /** - * @return array|float|int - */ - function hrtime(bool $as_num = false) - { - $t = microtime(true); - - if ($as_num) { - return $t * 1000000000; - } - - $s = floor($t); - return [0 => $s, 1 => ($t - $s) * 1000000000]; - } -} - -function verify_config(): void +function verify_config(string $php): void { - global $php; - if (empty($php) || !file_exists($php)) { error('environment variable TEST_PHP_EXECUTABLE must be set to specify PHP executable!'); } @@ -846,9 +804,13 @@ function verify_config(): void } } -function write_information(): void +/** + * @param string[] $user_tests + */ +function write_information(array $user_tests, $phpdbg): void { - global $php, $php_cgi, $phpdbg, $php_info, $user_tests, $ini_overwrites, $pass_options, $exts_to_test, $valgrind, $no_file_cache; + global $php, $php_cgi, $php_info, $ini_overwrites, $pass_options, $exts_to_test, $valgrind, $no_file_cache; + $php_escaped = escapeshellarg($php); // Get info from php $info_file = __DIR__ . 
'/run-test-info.php'; @@ -864,11 +826,12 @@ function write_information(): void $info_params = []; settings2array($ini_overwrites, $info_params); $info_params = settings2params($info_params); - $php_info = `$php $pass_options $info_params $no_file_cache "$info_file"`; - define('TESTED_PHP_VERSION', `$php -n -r "echo PHP_VERSION;"`); + $php_info = shell_exec("$php_escaped $pass_options $info_params $no_file_cache \"$info_file\""); + define('TESTED_PHP_VERSION', shell_exec("$php_escaped -n -r \"echo PHP_VERSION;\"")); if ($php_cgi && $php != $php_cgi) { - $php_info_cgi = `$php_cgi $pass_options $info_params $no_file_cache -q "$info_file"`; + $php_cgi_escaped = escapeshellarg($php_cgi); + $php_info_cgi = shell_exec("$php_cgi_escaped $pass_options $info_params $no_file_cache -q \"$info_file\""); $php_info_sep = "\n---------------------------------------------------------------------"; $php_cgi_info = "$php_info_sep\nPHP : $php_cgi $php_info_cgi$php_info_sep"; } else { @@ -876,7 +839,8 @@ function write_information(): void } if ($phpdbg) { - $phpdbg_info = `$phpdbg $pass_options $info_params $no_file_cache -qrr "$info_file"`; + $phpdbg_escaped = escapeshellarg($phpdbg); + $phpdbg_info = shell_exec("$phpdbg_escaped $pass_options $info_params $no_file_cache -qrr \"$info_file\""); $php_info_sep = "\n---------------------------------------------------------------------"; $phpdbg_info = "$php_info_sep\nPHP : $phpdbg $phpdbg_info$php_info_sep"; } else { @@ -888,17 +852,28 @@ function write_information(): void } @unlink($info_file); - // load list of enabled extensions - save_text($info_file, - ''); - $exts_to_test = explode(',', `$php $pass_options $info_params $no_file_cache "$info_file"`); + // load list of enabled and loadable extensions + save_text($info_file, <<<'PHP' + ['session.auto_start=0'], 'tidy' => ['tidy.clean_output=0'], 'zlib' => ['zlib.output_compression=Off'], 'xdebug' => ['xdebug.mode=off'], - 'mbstring' => ['mbstring.func_overload=0'], ]; foreach ($info_params_ex as $ext => $ini_overwrites_ex) { @@ -927,134 +902,118 @@ function write_information(): void "; } -function save_or_mail_results(): void +function save_results(string $output_file, bool $prompt_to_save_results): void { - global $sum_results, $just_save_results, $failed_test_summary, - $PHP_FAILED_TESTS, $php, $output_file; + global $sum_results, $failed_test_summary, $PHP_FAILED_TESTS, $php; - /* We got failed Tests, offer the user to send an e-mail to QA team, unless NO_INTERACTION is set */ - if (!getenv('NO_INTERACTION') && !TRAVIS_CI) { + if (getenv('NO_INTERACTION')) { + return; + } + + if ($prompt_to_save_results) { + /* We got failed Tests, offer the user to save a QA report */ $fp = fopen("php://stdin", "r+"); if ($sum_results['FAILED'] || $sum_results['BORKED'] || $sum_results['WARNED'] || $sum_results['LEAKED']) { echo "\nYou may have found a problem in PHP."; } - echo "\nThis report can be automatically sent to the PHP QA team at\n"; - echo QA_REPORTS_PAGE . " and http://news.php.net/php.qa.reports\n"; + echo "\nThis report can be saved and used to open an issue on the bug tracker at\n"; + echo "https://github.com/php/php-src/issues\n"; echo "This gives us a better understanding of PHP's behavior.\n"; - echo "If you don't want to send the report immediately you can choose\n"; - echo "option \"s\" to save it. You can then email it to " . PHP_QA_EMAIL . " later.\n"; - echo "Do you want to send this report now? [Yns]: "; + echo "Do you want to save this report in a file? 
[Yn]: "; flush(); $user_input = fgets($fp, 10); - $just_save_results = (!empty($user_input) && strtolower($user_input[0]) === 's'); - } - - if ($just_save_results || !getenv('NO_INTERACTION') || TRAVIS_CI) { - if ($just_save_results || TRAVIS_CI || strlen(trim($user_input)) == 0 || strtolower($user_input[0]) == 'y') { - /* - * Collect information about the host system for our report - * Fetch phpinfo() output so that we can see the PHP environment - * Make an archive of all the failed tests - * Send an email - */ - if ($just_save_results) { - $user_input = 's'; - } - - /* Ask the user to provide an email address, so that QA team can contact the user */ - if (TRAVIS_CI) { - $user_email = 'travis at php dot net'; - } elseif (!strncasecmp($user_input, 'y', 1) || strlen(trim($user_input)) == 0) { - echo "\nPlease enter your email address.\n(Your address will be mangled so that it will not go out on any\nmailinglist in plain text): "; - flush(); - $user_email = trim(fgets($fp, 1024)); - $user_email = str_replace("@", " at ", str_replace(".", " dot ", $user_email)); - } - - $failed_tests_data = ''; - $sep = "\n" . str_repeat('=', 80) . "\n"; - $failed_tests_data .= $failed_test_summary . "\n"; - $failed_tests_data .= get_summary(true) . "\n"; - - if ($sum_results['FAILED']) { - foreach ($PHP_FAILED_TESTS['FAILED'] as $test_info) { - $failed_tests_data .= $sep . $test_info['name'] . $test_info['info']; - $failed_tests_data .= $sep . file_get_contents(realpath($test_info['output'])); - $failed_tests_data .= $sep . file_get_contents(realpath($test_info['diff'])); - $failed_tests_data .= $sep . "\n\n"; - } - $status = "failed"; - } else { - $status = "success"; - } - - $failed_tests_data .= "\n" . $sep . 'BUILD ENVIRONMENT' . $sep; - $failed_tests_data .= "OS:\n" . PHP_OS . " - " . php_uname() . "\n\n"; - $ldd = $autoconf = $sys_libtool = $libtool = $compiler = 'N/A'; - - if (!IS_WINDOWS) { - /* If PHP_AUTOCONF is set, use it; otherwise, use 'autoconf'. */ - if (getenv('PHP_AUTOCONF')) { - $autoconf = shell_exec(getenv('PHP_AUTOCONF') . ' --version'); - } else { - $autoconf = shell_exec('autoconf --version'); - } - - /* Always use the generated libtool - Mac OSX uses 'glibtool' */ - $libtool = shell_exec(INIT_DIR . '/libtool --version'); + fclose($fp); + if (!(strlen(trim($user_input)) == 0 || strtolower($user_input[0]) == 'y')) { + return; + } + } + /** + * Collect information about the host system for our report + * Fetch phpinfo() output so that we can see the PHP environment + * Make an archive of all the failed tests + */ + $failed_tests_data = ''; + $sep = "\n" . str_repeat('=', 80) . "\n"; + $failed_tests_data .= $failed_test_summary . "\n"; + $failed_tests_data .= get_summary(true) . "\n"; + + if ($sum_results['FAILED']) { + foreach ($PHP_FAILED_TESTS['FAILED'] as $test_info) { + $failed_tests_data .= $sep . $test_info['name'] . $test_info['info']; + $failed_tests_data .= $sep . file_get_contents(realpath($test_info['output'])); + $failed_tests_data .= $sep . file_get_contents(realpath($test_info['diff'])); + $failed_tests_data .= $sep . "\n\n"; + } + } - /* Use shtool to find out if there is glibtool present (MacOSX) */ - $sys_libtool_path = shell_exec(__DIR__ . '/build/shtool path glibtool libtool'); + $failed_tests_data .= "\n" . $sep . 'BUILD ENVIRONMENT' . $sep; + $failed_tests_data .= "OS:\n" . PHP_OS . " - " . php_uname() . 
"\n\n"; + $ldd = $autoconf = $sys_libtool = $libtool = $compiler = 'N/A'; - if ($sys_libtool_path) { - $sys_libtool = shell_exec(str_replace("\n", "", $sys_libtool_path) . ' --version'); - } + if (!IS_WINDOWS) { + /* If PHP_AUTOCONF is set, use it; otherwise, use 'autoconf'. */ + if (getenv('PHP_AUTOCONF')) { + $autoconf = shell_exec(getenv('PHP_AUTOCONF') . ' --version'); + } else { + $autoconf = shell_exec('autoconf --version'); + } - /* Try the most common flags for 'version' */ - $flags = ['-v', '-V', '--version']; - $cc_status = 0; + /* Always use the generated libtool - Mac OSX uses 'glibtool' */ + $libtool = shell_exec(INIT_DIR . '/libtool --version'); - foreach ($flags as $flag) { - system(getenv('CC') . " $flag >/dev/null 2>&1", $cc_status); - if ($cc_status == 0) { - $compiler = shell_exec(getenv('CC') . " $flag 2>&1"); - break; - } - } + /* Use shtool to find out if there is glibtool present (MacOSX) */ + $sys_libtool_path = shell_exec(__DIR__ . '/build/shtool path glibtool libtool'); - $ldd = shell_exec("ldd $php 2>/dev/null"); - } + if ($sys_libtool_path) { + $sys_libtool = shell_exec(str_replace("\n", "", $sys_libtool_path) . ' --version'); + } - $failed_tests_data .= "Autoconf:\n$autoconf\n"; - $failed_tests_data .= "Bundled Libtool:\n$libtool\n"; - $failed_tests_data .= "System Libtool:\n$sys_libtool\n"; - $failed_tests_data .= "Compiler:\n$compiler\n"; - $failed_tests_data .= "Bison:\n" . shell_exec('bison --version 2>/dev/null') . "\n"; - $failed_tests_data .= "Libraries:\n$ldd\n"; - $failed_tests_data .= "\n"; + /* Try the most common flags for 'version' */ + $flags = ['-v', '-V', '--version']; + $cc_status = 0; - if (isset($user_email)) { - $failed_tests_data .= "User's E-mail: " . $user_email . "\n\n"; + foreach ($flags as $flag) { + system(getenv('CC') . " $flag >/dev/null 2>&1", $cc_status); + if ($cc_status == 0) { + $compiler = shell_exec(getenv('CC') . " $flag 2>&1"); + break; } + } - $failed_tests_data .= $sep . "PHPINFO" . $sep; - $failed_tests_data .= shell_exec($php . ' -ddisplay_errors=stderr -dhtml_errors=0 -i 2> /dev/null'); + $ldd = shell_exec("ldd $php 2>/dev/null"); + } - if (($just_save_results || !mail_qa_team($failed_tests_data, $status)) && !TRAVIS_CI) { - file_put_contents($output_file, $failed_tests_data); + $failed_tests_data .= "Autoconf:\n$autoconf\n"; + $failed_tests_data .= "Bundled Libtool:\n$libtool\n"; + $failed_tests_data .= "System Libtool:\n$sys_libtool\n"; + $failed_tests_data .= "Compiler:\n$compiler\n"; + $failed_tests_data .= "Bison:\n" . shell_exec('bison --version 2>/dev/null') . "\n"; + $failed_tests_data .= "Libraries:\n$ldd\n"; + $failed_tests_data .= "\n"; + $failed_tests_data .= $sep . "PHPINFO" . $sep; + $failed_tests_data .= shell_exec($php . ' -ddisplay_errors=stderr -dhtml_errors=0 -i 2> /dev/null'); - if (!$just_save_results) { - echo "\nThe test script was unable to automatically send the report to PHP's QA Team\n"; - } + file_put_contents($output_file, $failed_tests_data); + echo "Report saved to: ", $output_file, "\n"; +} - echo "Please send " . $output_file . " to " . PHP_QA_EMAIL . 
" manually, thank you.\n"; - } elseif (!getenv('NO_INTERACTION') && !TRAVIS_CI) { - fwrite($fp, "\nThank you for helping to make PHP better.\n"); - fclose($fp); - } - } +function get_binary(string $php, string $sapi, string $sapi_path): ?string +{ + $dir = dirname($php); + if (IS_WINDOWS && file_exists("$dir/$sapi.exe")) { + return realpath("$dir/$sapi.exe"); + } + // Sources tree + if (file_exists("$dir/../../$sapi_path")) { + return realpath("$dir/../../$sapi_path"); + } + // Installation tree, preserve command prefix/suffix + $inst = str_replace('php', $sapi, basename($php)); + if (file_exists("$dir/$inst")) { + return realpath("$dir/$inst"); } + return null; } function find_files(string $dir, bool $is_ext_dir = false, bool $ignore = false): void @@ -1065,9 +1024,9 @@ function find_files(string $dir, bool $is_ext_dir = false, bool $ignore = false) while (($name = readdir($o)) !== false) { if (is_dir("{$dir}/{$name}") && !in_array($name, ['.', '..', '.svn'])) { - $skip_ext = ($is_ext_dir && !in_array(strtolower($name), $exts_to_test)); + $skip_ext = ($is_ext_dir && !in_array($name, $exts_to_test)); if ($skip_ext) { - $exts_skipped++; + $exts_skipped[] = $name; } find_files("{$dir}/{$name}", false, $ignore || $skip_ext); } @@ -1079,11 +1038,13 @@ function find_files(string $dir, bool $is_ext_dir = false, bool $ignore = false) } // Otherwise we're only interested in *.phpt files. - if (substr($name, -5) == '.phpt') { + // (but not those starting with a dot, which are hidden on + // many platforms) + if (substr($name, -5) == '.phpt' && substr($name, 0, 1) !== '.') { + $testfile = realpath("{$dir}/{$name}"); if ($ignore) { - $ignored_by_ext++; + $ignored_by_ext[] = $testfile; } else { - $testfile = realpath("{$dir}/{$name}"); $test_files[] = $testfile; } } @@ -1099,9 +1060,9 @@ function test_name($name): string { if (is_array($name)) { return $name[0] . ':' . $name[1]; - } else { - return $name; } + + return $name; } /** * @param array|string $a @@ -1119,69 +1080,21 @@ function test_sort($a, $b): int if ($ta == $tb) { return strcmp($a, $b); - } else { - return $tb - $ta; - } -} - -// -// Send Email to QA Team -// - -function mail_qa_team(string $data, bool $status = false): bool -{ - $url_bits = parse_url(QA_SUBMISSION_PAGE); - - if ($proxy = getenv('http_proxy')) { - $proxy = parse_url($proxy); - $path = $url_bits['host'] . $url_bits['path']; - $host = $proxy['host']; - if (empty($proxy['port'])) { - $proxy['port'] = 80; - } - $port = $proxy['port']; - } else { - $path = $url_bits['path']; - $host = $url_bits['host']; - $port = empty($url_bits['port']) ? 80 : $port = $url_bits['port']; - } - - $data = "php_test_data=" . urlencode(base64_encode(str_replace("\00", '[0x0]', $data))); - $data_length = strlen($data); - - $fs = fsockopen($host, $port, $errno, $errstr, 10); - - if (!$fs) { - return false; } - $php_version = urlencode(TESTED_PHP_VERSION); - - echo "\nPosting to " . QA_SUBMISSION_PAGE . "\n"; - fwrite($fs, "POST " . $path . "?status=$status&version=$php_version HTTP/1.1\r\n"); - fwrite($fs, "Host: " . $host . "\r\n"); - fwrite($fs, "User-Agent: QA Browser 0.1\r\n"); - fwrite($fs, "Content-Type: application/x-www-form-urlencoded\r\n"); - fwrite($fs, "Content-Length: " . $data_length . "\r\n\r\n"); - fwrite($fs, $data); - fwrite($fs, "\r\n\r\n"); - fclose($fs); - - return true; + return $tb - $ta; } // -// Write the given text to a temporary file, and return the filename. +// Write the given text to a temporary file. 
// function save_text(string $filename, string $text, ?string $filename_copy = null): void { global $DETAILED; - if ($filename_copy && $filename_copy != $filename) { - if (file_put_contents($filename_copy, $text) === false) { - error("Cannot open file '" . $filename_copy . "' (save_text)"); - } + if ($filename_copy && $filename_copy != $filename && file_put_contents($filename_copy, $text) === false) { + error("Cannot open file '" . $filename_copy . "' (save_text)"); } if (file_put_contents($filename, $text) === false) { @@ -1231,6 +1144,13 @@ function system_with_timeout( ) { global $valgrind; + // when proc_open cmd is passed as a string (without bypass_shell=true option) the cmd goes thru shell + // and on Windows quotes are discarded, this is a fix to honor the quotes and allow values containing + // spaces like '"C:\Program Files\PHP\php.exe"' to be passed as 1 argument correctly + if (IS_WINDOWS) { + $commandline = 'start "" /b /wait ' . $commandline . ' & exit'; + } + $data = ''; $bin_env = []; @@ -1263,6 +1183,10 @@ function system_with_timeout( } $timeout = $valgrind ? 300 : ($env['TEST_TIMEOUT'] ?? 60); + /* ASAN can cause a ~2-3x slowdown. */ + if (isset($env['SKIP_ASAN'])) { + $timeout *= 3; + } while (true) { /* hide errors from interrupted syscalls */ @@ -1274,12 +1198,16 @@ function system_with_timeout( if ($n === false) { break; - } elseif ($n === 0) { + } + + if ($n === 0) { /* timed out */ $data .= "\n ** ERROR: process timed out **\n"; proc_terminate($proc, 9); return $data; - } elseif ($n > 0) { + } + + if ($n > 0) { if ($captureStdOut) { $line = fread($pipes[1], 8192); } elseif ($captureStdErr) { @@ -1311,23 +1239,35 @@ function system_with_timeout( return $data; } -/** - * @param string|array|null $redir_tested - */ -function run_all_tests(array $test_files, array $env, $redir_tested = null): void +function run_all_tests(array $test_files, array $env, ?string $redir_tested = null): void { - global $test_results, $failed_tests_file, $result_tests_file, $php, $test_idx, $file_cache; + global $test_results, $failed_tests_file, $result_tests_file, $php, $test_idx, $file_cache, $shuffle; + global $preload; // Parallel testing global $PHP_FAILED_TESTS, $workers, $workerID, $workerSock; - if ($file_cache !== null) { - /* Automatically skip opcache tests in --file-cache mode, - * because opcache generally doesn't expect those to run under file cache */ - $test_files = array_filter($test_files, function($test) { - return !is_string($test) || false === strpos($test, 'ext/opcache'); + if ($file_cache !== null || $preload) { + /* Automatically skip opcache tests in --file-cache and --preload mode, + * because opcache generally expects these to run under a default configuration. */ + $test_files = array_filter($test_files, function($test) use($preload) { + if (!is_string($test)) { + return true; + } + if (false !== strpos($test, 'ext/opcache')) { + return false; + } + if ($preload && false !== strpos($test, 'ext/zend_test/tests/observer')) { + return false; + } + return true; }); } + // To discover parallelization issues and order dependent tests it is useful to randomize the test order. + if ($shuffle) { + shuffle($test_files); + } + /* Ignore -jN if there is only one file to analyze. */ if ($workers !== null && count($test_files) > 1 && !$workerID) { run_all_tests_parallel($test_files, $env, $redir_tested); @@ -1382,12 +1322,11 @@ function run_all_tests(array $test_files, array $env, $redir_tested = null): voi } } -/** The heart of parallel testing. 
- * @param string|array|null $redir_tested - */ -function run_all_tests_parallel(array $test_files, array $env, $redir_tested): void +function run_all_tests_parallel(array $test_files, array $env, ?string $redir_tested): void { - global $workers, $test_idx, $test_cnt, $test_results, $failed_tests_file, $result_tests_file, $PHP_FAILED_TESTS, $shuffle, $SHOW_ONLY_GROUPS, $valgrind; + global $workers, $test_idx, $test_results, $failed_tests_file, $result_tests_file, $PHP_FAILED_TESTS, $shuffle, $valgrind, $show_progress; + + global $junit; // The PHP binary running run-tests.php, and run-tests.php itself // This PHP executable is *not* necessarily the same as the tested version @@ -1397,10 +1336,6 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v $workerProcs = []; $workerSocks = []; - echo "=====================================================================\n"; - echo "========= WELCOME TO THE FUTURE: run-tests PARALLEL EDITION =========\n"; - echo "=====================================================================\n"; - // Each test may specify a list of conflict keys. While a test that conflicts with // key K is running, no other test that conflicts with K may run. Conflict keys are // specified either in the --CONFLICTS-- section, or CONFLICTS file inside a directory. @@ -1438,11 +1373,8 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v // Some tests assume that they are executed in a certain order. We will be popping from // $test_files, so reverse its order here. This makes sure that order is preserved at least // for tests with a common conflict key. - $test_files = array_reverse($test_files); - - // To discover parallelization issues it is useful to randomize the test order. - if ($shuffle) { - shuffle($test_files); + if (!$shuffle) { + $test_files = array_reverse($test_files); } // Don't start more workers than test files. @@ -1467,11 +1399,11 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v $startTime = microtime(true); for ($i = 1; $i <= $workers; $i++) { $proc = proc_open( - $thisPHP . ' ' . escapeshellarg($thisScript), + [$thisPHP, $thisScript], [], // Inherit our stdin, stdout and stderr $pipes, null, - $_ENV + [ + $GLOBALS['environment'] + [ "TEST_PHP_WORKER" => $i, "TEST_PHP_URI" => $sockUri, ], @@ -1500,10 +1432,6 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v "constants" => [ "INIT_DIR" => INIT_DIR, "TEST_PHP_SRCDIR" => TEST_PHP_SRCDIR, - "PHP_QA_EMAIL" => PHP_QA_EMAIL, - "QA_SUBMISSION_PAGE" => QA_SUBMISSION_PAGE, - "QA_REPORTS_PAGE" => QA_REPORTS_PAGE, - "TRAVIS_CI" => TRAVIS_CI ] ])) . "\n"; @@ -1555,6 +1483,10 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v kill_children($workerProcs); error("Could not find worker stdout in array of worker stdouts, THIS SHOULD NOT HAPPEN."); } + if (feof($workerSock)) { + kill_children($workerProcs); + error("Worker $i died unexpectedly"); + } while (false !== ($rawMessage = fgets($workerSock))) { // work around fgets truncating things if (($rawMessageBuffers[$i] ?? '') !== '') { @@ -1587,9 +1519,7 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v } } } - if (junit_enabled()) { - junit_merge_results($message["junit"]); - } + $junit->mergeResults($message["junit"]); // no break case "ready": // Schedule sequential tests only once we are down to one worker. 
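// --- Illustrative aside (not part of this patch) ---------------------------
// The dispatcher/worker hunks above rely on a simple line-delimited protocol:
// each message is a PHP array that is serialize()d, base64-encoded, and sent
// as a single "\n"-terminated line over the worker socket. send_message() and
// safe_fwrite() (defined further down in this diff) implement the sending
// side; the receiving side sketched here is an assumption for illustration
// only, not code from the patch.
//
//     // sending a message to the dispatcher:
//     send_message($workerSock, ["type" => "ready"]);
//     // which boils down to:
//     safe_fwrite($workerSock, base64_encode(serialize(["type" => "ready"])) . "\n");
//
//     // reading one message on the other end of the socket (hypothetical):
//     $message = unserialize(base64_decode(fgets($workerSock)));
// ---------------------------------------------------------------------------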
@@ -1630,8 +1560,7 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v ]); } else { proc_terminate($workerProcs[$i]); - unset($workerProcs[$i]); - unset($workerSocks[$i]); + unset($workerProcs[$i], $workerSocks[$i]); goto escape; } break; @@ -1642,13 +1571,13 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v } $test_idx++; - if (!$SHOW_ONLY_GROUPS) { + if ($show_progress) { clear_show_test(); } echo $resultText; - if (!$SHOW_ONLY_GROUPS) { + if ($show_progress) { show_test($test_idx, count($workerProcs) . "/$workers concurrent test workers running"); } @@ -1681,7 +1610,6 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v 'E_USER_ERROR', 'E_USER_WARNING', 'E_USER_NOTICE', - 'E_STRICT', // TODO Cleanup when removed from Zend Engine. 'E_RECOVERABLE_ERROR', 'E_DEPRECATED', 'E_USER_DEPRECATED' @@ -1698,7 +1626,7 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v } } - if (!$SHOW_ONLY_GROUPS) { + if ($show_progress) { clear_show_test(); } @@ -1709,11 +1637,47 @@ function run_all_tests_parallel(array $test_files, array $env, $redir_tested): v } } +/** + * Calls fwrite and retries when network writes fail with errors such as "Resource temporarily unavailable" + * + * @param resource $stream the stream to fwrite to + * @param string $data + * @return int|false + */ +function safe_fwrite($stream, string $data) +{ + // safe_fwrite was tested by adding $message['unused'] = str_repeat('a', 20_000_000); in send_message() + // fwrites on tcp sockets can return false or less than strlen if the recipient is busy. + // (e.g. fwrite(): Send of 577 bytes failed with errno=35 Resource temporarily unavailable) + $bytes_written = 0; + while ($bytes_written < strlen($data)) { + $n = @fwrite($stream, substr($data, $bytes_written)); + if ($n === false) { + $write_streams = [$stream]; + $read_streams = []; + $except_streams = []; + /* Wait for up to 10 seconds for the stream to be ready to write again. */ + $result = stream_select($read_streams, $write_streams, $except_streams, 10); + if (!$result) { + echo "ERROR: send_message() stream_select() failed\n"; + return false; + } + $n = @fwrite($stream, substr($data, $bytes_written)); + if ($n === false) { + echo "ERROR: send_message() Failed to write chunk after stream_select: " . error_get_last()['message'] . "\n"; + return false; + } + } + $bytes_written += $n; + } + return $bytes_written; +} + function send_message($stream, array $message): void { $blocking = stream_get_meta_data($stream)["blocked"]; stream_set_blocking($stream, true); - fwrite($stream, base64_encode(serialize($message)) . "\n"); + safe_fwrite($stream, base64_encode(serialize($message)) . "\n"); stream_set_blocking($stream, $blocking); } @@ -1730,6 +1694,8 @@ function run_worker(): void { global $workerID, $workerSock; + global $junit; + $sockUri = getenv("TEST_PHP_URI"); $workerSock = stream_socket_client($sockUri, $_, $_, 5) or error("Couldn't connect to $sockUri"); @@ -1776,9 +1742,9 @@ function run_worker(): void run_all_tests($command["test_files"], $command["env"], $command["redir_tested"]); send_message($workerSock, [ "type" => "tests_finished", - "junit" => junit_enabled() ? $GLOBALS['JUNIT'] : null, + "junit" => $junit->isEnabled() ? 
$junit : null, ]); - junit_init(); + $junit->clear(); break; default: send_message($workerSock, [ @@ -1814,6 +1780,16 @@ function show_file_block(string $file, string $block, ?string $section = null): } } +function skip_test(string $tested, string $tested_file, string $shortname, string $reason): string +{ + global $junit; + + show_result('SKIP', $tested, $tested_file, "reason: $reason"); + $junit->initSuite($junit->getSuiteName($shortname)); + $junit->markTestAs('SKIP', $shortname, $tested, 0, $reason); + return 'SKIPPED'; +} + // // Run an individual test case. // @@ -1830,19 +1806,32 @@ function run_test(string $php, $file, array $env): string global $no_file_cache; global $slow_min_ms; global $preload, $file_cache; + global $num_repeats; // Parallel testing global $workerID; - $temp_filenames = null; - $org_file = $file; + global $show_progress; - if (isset($env['TEST_PHP_CGI_EXECUTABLE'])) { - $php_cgi = $env['TEST_PHP_CGI_EXECUTABLE']; - } + // Temporary + /** @var JUnit $junit */ + global $junit; - if (isset($env['TEST_PHPDBG_EXECUTABLE'])) { - $phpdbg = $env['TEST_PHPDBG_EXECUTABLE']; + static $skipCache; + if (!$skipCache) { + $enableSkipCache = !($env['DISABLE_SKIP_CACHE'] ?? '0'); + $skipCache = new SkipCache($enableSkipCache, $cfg['keep']['skip']); } + $orig_php = $php; + $php = escapeshellarg($php); + + $retried = false; +retry: + + $org_file = $file; + + $php_cgi = $env['TEST_PHP_CGI_EXECUTABLE'] ?? null; + $phpdbg = $env['TEST_PHPDBG_EXECUTABLE'] ?? null; + if (is_array($file)) { $file = $file[0]; } @@ -1854,136 +1843,38 @@ function run_test(string $php, $file, array $env): string "; } - // Load the sections of the test file. - $section_text = ['TEST' => '']; - - $fp = fopen($file, "rb") or error("Cannot open test file: $file"); - - $bork_info = null; - - if (!feof($fp)) { - $line = fgets($fp); - - if ($line === false) { - $bork_info = "cannot read test"; - } - } else { - $bork_info = "empty test [$file]"; - } - if ($bork_info === null && strncmp('--TEST--', $line, 8)) { - $bork_info = "tests must start with --TEST-- [$file]"; - } - - $section = 'TEST'; - $secfile = false; - $secdone = false; - - while (!feof($fp)) { - $line = fgets($fp); - - if ($line === false) { - break; - } - - // Match the beginning of a section. - if (preg_match('/^--([_A-Z]+)--/', $line, $r)) { - $section = (string) $r[1]; - - if (isset($section_text[$section]) && $section_text[$section]) { - $bork_info = "duplicated $section section"; - } - - // check for unknown sections - if (!in_array($section, [ - 'EXPECT', 'EXPECTF', 'EXPECTREGEX', 'EXPECTREGEX_EXTERNAL', 'EXPECT_EXTERNAL', 'EXPECTF_EXTERNAL', 'EXPECTHEADERS', - 'POST', 'POST_RAW', 'GZIP_POST', 'DEFLATE_POST', 'PUT', 'GET', 'COOKIE', 'ARGS', - 'FILE', 'FILEEOF', 'FILE_EXTERNAL', 'REDIRECTTEST', - 'CAPTURE_STDIO', 'STDIN', 'CGI', 'PHPDBG', - 'INI', 'ENV', 'EXTENSIONS', - 'SKIPIF', 'XFAIL', 'XLEAK', 'CLEAN', - 'CREDITS', 'DESCRIPTION', 'CONFLICTS', 'WHITESPACE_SENSITIVE', - ])) { - $bork_info = 'Unknown section "' . $section . '"'; - } - - $section_text[$section] = ''; - $secfile = $section == 'FILE' || $section == 'FILEEOF' || $section == 'FILE_EXTERNAL'; - $secdone = false; - continue; - } - - // Add to the section text. - if (!$secdone) { - $section_text[$section] .= $line; - } - - // End of actual test? 
- if ($secfile && preg_match('/^===DONE===\s*$/', $line)) { - $secdone = true; - } - } - - // the redirect section allows a set of tests to be reused outside of - // a given test dir - if ($bork_info === null) { - if (isset($section_text['REDIRECTTEST'])) { - if ($IN_REDIRECT) { - $bork_info = "Can't redirect a test from within a redirected test"; - } - } else { - if (!isset($section_text['PHPDBG']) && isset($section_text['FILE']) + isset($section_text['FILEEOF']) + isset($section_text['FILE_EXTERNAL']) != 1) { - $bork_info = "missing section --FILE--"; - } - - if (isset($section_text['FILEEOF'])) { - $section_text['FILE'] = preg_replace("/[\r\n]+$/", '', $section_text['FILEEOF']); - unset($section_text['FILEEOF']); - } - - foreach (['FILE', 'EXPECT', 'EXPECTF', 'EXPECTREGEX'] as $prefix) { - $key = $prefix . '_EXTERNAL'; - - if (isset($section_text[$key])) { - // don't allow tests to retrieve files from anywhere but this subdirectory - $section_text[$key] = dirname($file) . '/' . trim(str_replace('..', '', $section_text[$key])); - - if (file_exists($section_text[$key])) { - $section_text[$prefix] = file_get_contents($section_text[$key]); - unset($section_text[$key]); - } else { - $bork_info = "could not load --" . $key . "-- " . dirname($file) . '/' . trim($section_text[$key]); - } - } - } - - if ((isset($section_text['EXPECT']) + isset($section_text['EXPECTF']) + isset($section_text['EXPECTREGEX'])) != 1) { - $bork_info = "missing section --EXPECT--, --EXPECTF-- or --EXPECTREGEX--"; - } - } - } - fclose($fp); - $shortname = str_replace(TEST_PHP_SRCDIR . '/', '', $file); $tested_file = $shortname; - if ($bork_info !== null) { - show_result("BORK", $bork_info, $tested_file); + try { + $test = new TestFile($file, (bool)$IN_REDIRECT); + } catch (BorkageException $ex) { + show_result("BORK", $ex->getMessage(), $tested_file); $PHP_FAILED_TESTS['BORKED'][] = [ 'name' => $file, 'test_name' => '', 'output' => '', 'diff' => '', - 'info' => "$bork_info [$file]", + 'info' => "{$ex->getMessage()} [$file]", ]; - junit_mark_test_as('BORK', $shortname, $tested_file, 0, $bork_info); + $junit->markTestAs('BORK', $shortname, $tested_file, 0, $ex->getMessage()); return 'BORKED'; } - if (isset($section_text['CAPTURE_STDIO'])) { - $captureStdIn = stripos($section_text['CAPTURE_STDIO'], 'STDIN') !== false; - $captureStdOut = stripos($section_text['CAPTURE_STDIO'], 'STDOUT') !== false; - $captureStdErr = stripos($section_text['CAPTURE_STDIO'], 'STDERR') !== false; + $tested = $test->getName(); + + if ($test->hasSection('FILE_EXTERNAL')) { + if ($num_repeats > 1) { + return skip_test($tested, $tested_file, $shortname, 'Test with FILE_EXTERNAL might not be repeatable'); + } + } + + if ($test->hasSection('CAPTURE_STDIO')) { + $capture = $test->getSection('CAPTURE_STDIO'); + $captureStdIn = stripos($capture, 'STDIN') !== false; + $captureStdOut = stripos($capture, 'STDOUT') !== false; + $captureStdErr = stripos($capture, 'STDERR') !== false; } else { $captureStdIn = true; $captureStdOut = true; @@ -1995,55 +1886,45 @@ function run_test(string $php, $file, array $env): string $cmdRedirect = ''; } - $tested = trim($section_text['TEST']); - /* For GET/POST/PUT tests, check if cgi sapi is available and if it is, use it. 
*/ - if (array_key_exists('CGI', $section_text) || !empty($section_text['GET']) || !empty($section_text['POST']) || !empty($section_text['GZIP_POST']) || !empty($section_text['DEFLATE_POST']) || !empty($section_text['POST_RAW']) || !empty($section_text['PUT']) || !empty($section_text['COOKIE']) || !empty($section_text['EXPECTHEADERS'])) { - if (isset($php_cgi)) { - $php = $php_cgi . ' -C '; - } elseif (IS_WINDOWS && file_exists(dirname($php) . "/php-cgi.exe")) { - $php = realpath(dirname($php) . "/php-cgi.exe") . ' -C '; - } else { - if (file_exists(dirname($php) . "/../../sapi/cgi/php-cgi")) { - $php = realpath(dirname($php) . "/../../sapi/cgi/php-cgi") . ' -C '; - } elseif (file_exists("./sapi/cgi/php-cgi")) { - $php = realpath("./sapi/cgi/php-cgi") . ' -C '; - } elseif (file_exists(dirname($php) . "/php-cgi")) { - $php = realpath(dirname($php) . "/php-cgi") . ' -C '; - } else { - show_result('SKIP', $tested, $tested_file, "reason: CGI not available"); - - junit_init_suite(junit_get_suitename_for($shortname)); - junit_mark_test_as('SKIP', $shortname, $tested, 0, 'CGI not available'); - return 'SKIPPED'; - } + $uses_cgi = false; + if ($test->isCGI()) { + if (!$php_cgi) { + return skip_test($tested, $tested_file, $shortname, 'CGI not available'); } + $php = escapeshellarg($php_cgi) . ' -C '; $uses_cgi = true; + if ($num_repeats > 1) { + return skip_test($tested, $tested_file, $shortname, 'CGI does not support --repeat'); + } } /* For phpdbg tests, check if phpdbg sapi is available and if it is, use it. */ $extra_options = ''; - if (array_key_exists('PHPDBG', $section_text)) { - if (!isset($section_text['STDIN'])) { - $section_text['STDIN'] = $section_text['PHPDBG'] . "\n"; - } - + if ($test->hasSection('PHPDBG')) { if (isset($phpdbg)) { - $php = $phpdbg . ' -qIb'; + $php = escapeshellarg($phpdbg) . ' -qIb'; // Additional phpdbg command line options for sections that need to // be run straight away. For example, EXTENSIONS, SKIPIF, CLEAN. $extra_options = '-rr'; } else { - show_result('SKIP', $tested, $tested_file, "reason: phpdbg not available"); + return skip_test($tested, $tested_file, $shortname, 'phpdbg not available'); + } + if ($num_repeats > 1) { + return skip_test($tested, $tested_file, $shortname, 'phpdbg does not support --repeat'); + } + } - junit_init_suite(junit_get_suitename_for($shortname)); - junit_mark_test_as('SKIP', $shortname, $tested, 0, 'phpdbg not available'); - return 'SKIPPED'; + foreach (['CLEAN', 'STDIN', 'CAPTURE_STDIO'] as $section) { + if ($test->hasSection($section)) { + if ($num_repeats > 1) { + return skip_test($tested, $tested_file, $shortname, "Test with $section might not be repeatable"); + } } } - if (!$SHOW_ONLY_GROUPS && !$workerID) { + if ($show_progress && !$workerID) { show_test($test_idx, $shortname); } @@ -2062,6 +1943,7 @@ function run_test(string $php, $file, array $env): string $diff_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'diff'; $log_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'log'; $exp_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'exp'; + $stdin_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'stdin'; $output_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'out'; $memcheck_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'mem'; $sh_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 
'sh'; @@ -2079,32 +1961,19 @@ function run_test(string $php, $file, array $env): string $temp_skipif .= 's'; $temp_file .= 's'; $temp_clean .= 's'; - $copy_file = $temp_dir . DIRECTORY_SEPARATOR . basename(is_array($file) ? $file[1] : $file) . '.phps'; + $copy_file = $temp_dir . DIRECTORY_SEPARATOR . basename($file) . '.phps'; if (!is_dir(dirname($copy_file))) { mkdir(dirname($copy_file), 0777, true) or error("Cannot create output directory - " . dirname($copy_file)); } - if (isset($section_text['FILE'])) { - save_text($copy_file, $section_text['FILE']); + if ($test->hasSection('FILE')) { + save_text($copy_file, $test->getSection('FILE')); } - - $temp_filenames = [ - 'file' => $copy_file, - 'diff' => $diff_filename, - 'log' => $log_filename, - 'exp' => $exp_filename, - 'out' => $output_filename, - 'mem' => $memcheck_filename, - 'sh' => $sh_filename, - 'php' => $temp_file, - 'skip' => $temp_skipif, - 'clean' => $temp_clean - ]; } if (is_array($IN_REDIRECT)) { - $tested = $IN_REDIRECT['prefix'] . ' ' . trim($section_text['TEST']); + $tested = $IN_REDIRECT['prefix'] . ' ' . $tested; $tested_file = $tmp_relative_file; $shortname = str_replace(TEST_PHP_SRCDIR . '/', '', $tested_file); } @@ -2113,6 +1982,7 @@ function run_test(string $php, $file, array $env): string @unlink($diff_filename); @unlink($log_filename); @unlink($exp_filename); + @unlink($stdin_filename); @unlink($output_filename); @unlink($memcheck_filename); @unlink($sh_filename); @@ -2135,8 +2005,9 @@ function run_test(string $php, $file, array $env): string $env['CONTENT_LENGTH'] = ''; $env['TZ'] = ''; - if (!empty($section_text['ENV'])) { - foreach (explode("\n", trim($section_text['ENV'])) as $e) { + if ($test->sectionNotEmpty('ENV')) { + $env_str = str_replace('{PWD}', dirname($file), $test->getSection('ENV')); + foreach (explode("\n", $env_str) as $e) { $e = explode('=', trim($e), 2); if (!empty($e[0]) && isset($e[1])) { @@ -2149,23 +2020,41 @@ function run_test(string $php, $file, array $env): string $ini_settings = $workerID ? ['opcache.cache_id' => "worker$workerID"] : []; // Additional required extensions - if (array_key_exists('EXTENSIONS', $section_text)) { + $extensions = []; + if ($test->hasSection('EXTENSIONS')) { + $extensions = preg_split("/[\n\r]+/", trim($test->getSection('EXTENSIONS'))); + } + if (is_array($IN_REDIRECT) && $IN_REDIRECT['EXTENSIONS'] != []) { + $extensions = array_merge($extensions, $IN_REDIRECT['EXTENSIONS']); + } + + /* Load required extensions */ + if ($extensions != []) { $ext_params = []; settings2array($ini_overwrites, $ext_params); $ext_params = settings2params($ext_params); - $ext_dir = `$php $pass_options $extra_options $ext_params $no_file_cache -d display_errors=0 -r "echo ini_get('extension_dir');"`; - $extensions = preg_split("/[\n\r]+/", trim($section_text['EXTENSIONS'])); - $loaded = explode(",", `$php $pass_options $extra_options $ext_params $no_file_cache -d display_errors=0 -r "echo implode(',', get_loaded_extensions());"`); + [$ext_dir, $loaded] = $skipCache->getExtensions("$orig_php $pass_options $extra_options $ext_params $no_file_cache"); $ext_prefix = IS_WINDOWS ? "php_" : ""; + $missing = []; foreach ($extensions as $req_ext) { - if (!in_array($req_ext, $loaded)) { - if ($req_ext == 'opcache') { - $ini_settings['zend_extension'][] = $ext_dir . DIRECTORY_SEPARATOR . $ext_prefix . $req_ext . '.' . PHP_SHLIB_SUFFIX; + if (!in_array($req_ext, $loaded, true)) { + if ($req_ext == 'opcache' || $req_ext == 'xdebug') { + $ext_file = $ext_dir . DIRECTORY_SEPARATOR . $ext_prefix . 
$req_ext . '.' . PHP_SHLIB_SUFFIX; + $ini_settings['zend_extension'][] = $ext_file; } else { - $ini_settings['extension'][] = $ext_dir . DIRECTORY_SEPARATOR . $ext_prefix . $req_ext . '.' . PHP_SHLIB_SUFFIX; + $ext_file = $ext_dir . DIRECTORY_SEPARATOR . $ext_prefix . $req_ext . '.' . PHP_SHLIB_SUFFIX; + $ini_settings['extension'][] = $ext_file; + } + if (!is_readable($ext_file)) { + $missing[] = $req_ext; } } } + if ($missing) { + $message = 'Required extension' . (count($missing) > 1 ? 's' : '') + . ' missing: ' . implode(', ', $missing); + return skip_test($tested, $tested_file, $shortname, $message); + } } // additional ini overwrites @@ -2185,16 +2074,38 @@ function run_test(string $php, $file, array $env): string // even though all the files are re-created. $ini_settings['opcache.validate_timestamps'] = '0'; } + } else if ($num_repeats > 1) { + // Make sure warnings still show up on the second run. + $ini_settings['opcache.record_warnings'] = '1'; } // Any special ini settings // these may overwrite the test defaults... - if (array_key_exists('INI', $section_text)) { - $section_text['INI'] = str_replace('{PWD}', dirname($file), $section_text['INI']); - $section_text['INI'] = str_replace('{TMP}', sys_get_temp_dir(), $section_text['INI']); + if ($test->hasSection('INI')) { + $ini = str_replace('{PWD}', dirname($file), $test->getSection('INI')); + $ini = str_replace('{TMP}', sys_get_temp_dir(), $ini); $replacement = IS_WINDOWS ? '"' . PHP_BINARY . ' -r \"while ($in = fgets(STDIN)) echo $in;\" > $1"' : 'tee $1 >/dev/null'; - $section_text['INI'] = preg_replace('/{MAIL:(\S+)}/', $replacement, $section_text['INI']); - settings2array(preg_split("/[\n\r]+/", $section_text['INI']), $ini_settings); + $ini = preg_replace('/{MAIL:(\S+)}/', $replacement, $ini); + $skip = false; + $ini = preg_replace_callback('/{ENV:(\S+)}/', function ($m) use (&$skip) { + $name = $m[1]; + $value = getenv($name); + if ($value === false) { + $skip = sprintf('Environment variable %s is not set', $name); + return ''; + } + return $value; + }, $ini); + if ($skip !== false) { + return skip_test($tested, $tested_file, $shortname, $skip); + } + settings2array(preg_split("/[\n\r]+/", $ini), $ini_settings); + + if (isset($ini_settings['opcache.opt_debug_level'])) { + if ($num_repeats > 1) { + return skip_test($tested, $tested_file, $shortname, 'opt_debug_level tests are not repeatable'); + } + } } $ini_settings = settings2params($ini_settings); @@ -2205,85 +2116,95 @@ function run_test(string $php, $file, array $env): string $info = ''; $warn = false; - if (array_key_exists('SKIPIF', $section_text)) { - if (trim($section_text['SKIPIF'])) { - show_file_block('skip', $section_text['SKIPIF']); - save_text($test_skipif, $section_text['SKIPIF'], $temp_skipif); - $extra = !IS_WINDOWS ? - "unset REQUEST_METHOD; unset QUERY_STRING; unset PATH_TRANSLATED; unset SCRIPT_FILENAME; unset REQUEST_METHOD;" : ""; + if ($test->sectionNotEmpty('SKIPIF')) { + show_file_block('skip', $test->getSection('SKIPIF')); + $extra = !IS_WINDOWS ? 
+ "unset REQUEST_METHOD; unset QUERY_STRING; unset PATH_TRANSLATED; unset SCRIPT_FILENAME; unset REQUEST_METHOD;" : ""; - if ($valgrind) { - $env['USE_ZEND_ALLOC'] = '0'; - $env['ZEND_DONT_UNLOAD_MODULES'] = 1; - } + if ($valgrind) { + $env['USE_ZEND_ALLOC'] = '0'; + $env['ZEND_DONT_UNLOAD_MODULES'] = 1; + } - junit_start_timer($shortname); + $junit->startTimer($shortname); - $output = system_with_timeout("$extra $php $pass_options $extra_options -q $orig_ini_settings $no_file_cache -d display_errors=1 -d display_startup_errors=0 \"$test_skipif\"", $env); - $output = trim($output); + $startTime = microtime(true); + $commandLine = "$extra $php $pass_options $extra_options -q $orig_ini_settings $no_file_cache -d display_errors=1 -d display_startup_errors=0"; + $output = $skipCache->checkSkip($commandLine, $test->getSection('SKIPIF'), $test_skipif, $temp_skipif, $env); - junit_finish_timer($shortname); + $time = microtime(true) - $startTime; + $junit->stopTimer($shortname); - if (!$cfg['keep']['skip']) { - @unlink($test_skipif); - } + if ($time > $slow_min_ms / 1000) { + $PHP_FAILED_TESTS['SLOW'][] = [ + 'name' => $file, + 'test_name' => 'SKIPIF of ' . $tested . " [$tested_file]", + 'output' => '', + 'diff' => '', + 'info' => $time, + ]; + } - if (!strncasecmp('skip', $output, 4)) { - if (preg_match('/^skip\s*(.+)/i', $output, $m)) { - show_result('SKIP', $tested, $tested_file, "reason: $m[1]", $temp_filenames); - } else { - show_result('SKIP', $tested, $tested_file, '', $temp_filenames); - } + if (!$cfg['keep']['skip']) { + @unlink($test_skipif); + } - if (!$cfg['keep']['skip']) { - @unlink($test_skipif); - } + if (!strncasecmp('skip', $output, 4)) { + if (preg_match('/^skip\s*(.+)/i', $output, $m)) { + show_result('SKIP', $tested, $tested_file, "reason: $m[1]"); + } else { + show_result('SKIP', $tested, $tested_file, ''); + } - $message = !empty($m[1]) ? $m[1] : ''; - junit_mark_test_as('SKIP', $shortname, $tested, null, $message); - return 'SKIPPED'; - } - - if (!strncasecmp('info', $output, 4) && preg_match('/^info\s*(.+)/i', $output, $m)) { - $info = " (info: $m[1])"; - } elseif (!strncasecmp('warn', $output, 4) && preg_match('/^warn\s+(.+)/i', $output, $m)) { - $warn = true; /* only if there is a reason */ - $info = " (warn: $m[1])"; - } elseif (!strncasecmp('xfail', $output, 5)) { - // Pretend we have an XFAIL section - $section_text['XFAIL'] = ltrim(substr($output, 5)); - } elseif ($output !== '') { - show_result("BORK", $output, $tested_file, 'reason: invalid output from SKIPIF', $temp_filenames); - $PHP_FAILED_TESTS['BORKED'][] = [ - 'name' => $file, - 'test_name' => '', - 'output' => '', - 'diff' => '', - 'info' => "$output [$file]", - ]; + $message = !empty($m[1]) ? 
$m[1] : ''; + $junit->markTestAs('SKIP', $shortname, $tested, null, $message); + return 'SKIPPED'; + } - junit_mark_test_as('BORK', $shortname, $tested, null, $output); - return 'BORKED'; - } + if (!strncasecmp('info', $output, 4) && preg_match('/^info\s*(.+)/i', $output, $m)) { + $info = " (info: $m[1])"; + } elseif (!strncasecmp('warn', $output, 4) && preg_match('/^warn\s+(.+)/i', $output, $m)) { + $warn = true; /* only if there is a reason */ + $info = " (warn: $m[1])"; + } elseif (!strncasecmp('xfail', $output, 5)) { + // Pretend we have an XFAIL section + $test->setSection('XFAIL', ltrim(substr($output, 5))); + } elseif (!strncasecmp('xleak', $output, 5)) { + // Pretend we have an XLEAK section + $test->setSection('XLEAK', ltrim(substr($output, 5))); + } elseif (!strncasecmp('flaky', $output, 5)) { + // Pretend we have a FLAKY section + $test->setSection('FLAKY', ltrim(substr($output, 5))); + } elseif ($output !== '') { + show_result("BORK", $output, $tested_file, 'reason: invalid output from SKIPIF'); + $PHP_FAILED_TESTS['BORKED'][] = [ + 'name' => $file, + 'test_name' => '', + 'output' => '', + 'diff' => '', + 'info' => "$output [$file]", + ]; + + $junit->markTestAs('BORK', $shortname, $tested, null, $output); + return 'BORKED'; } } - if (!extension_loaded("zlib") - && (array_key_exists("GZIP_POST", $section_text) - || array_key_exists("DEFLATE_POST", $section_text))) { + if (!extension_loaded("zlib") && $test->hasAnySections("GZIP_POST", "DEFLATE_POST")) { $message = "ext/zlib required"; - show_result('SKIP', $tested, $tested_file, "reason: $message", $temp_filenames); - junit_mark_test_as('SKIP', $shortname, $tested, null, $message); + show_result('SKIP', $tested, $tested_file, "reason: $message"); + $junit->markTestAs('SKIP', $shortname, $tested, null, $message); return 'SKIPPED'; } - if (isset($section_text['REDIRECTTEST'])) { + if ($test->hasSection('REDIRECTTEST')) { $test_files = []; - $IN_REDIRECT = eval($section_text['REDIRECTTEST']); + $IN_REDIRECT = eval($test->getSection('REDIRECTTEST')); $IN_REDIRECT['via'] = "via [$shortname]\n\t"; $IN_REDIRECT['dir'] = realpath(dirname($file)); - $IN_REDIRECT['prefix'] = trim($section_text['TEST']); + $IN_REDIRECT['prefix'] = $tested; + $IN_REDIRECT['EXTENSIONS'] = $extensions; if (!empty($IN_REDIRECT['TESTS'])) { if (is_array($org_file)) { @@ -2313,28 +2234,28 @@ function run_test(string $php, $file, array $env): string // a redirected test never fails $IN_REDIRECT = false; - junit_mark_test_as('PASS', $shortname, $tested); + $junit->markTestAs('PASS', $shortname, $tested); return 'REDIR'; - } else { - $bork_info = "Redirect info must contain exactly one TEST string to be used as redirect directory."; - show_result("BORK", $bork_info, '', '', $temp_filenames); - $PHP_FAILED_TESTS['BORKED'][] = [ - 'name' => $file, - 'test_name' => '', - 'output' => '', - 'diff' => '', - 'info' => "$bork_info [$file]", - ]; } + + $bork_info = "Redirect info must contain exactly one TEST string to be used as redirect directory."; + show_result("BORK", $bork_info, '', ''); + $PHP_FAILED_TESTS['BORKED'][] = [ + 'name' => $file, + 'test_name' => '', + 'output' => '', + 'diff' => '', + 'info' => "$bork_info [$file]", + ]; } - if (is_array($org_file) || isset($section_text['REDIRECTTEST'])) { + if (is_array($org_file) || $test->hasSection('REDIRECTTEST')) { if (is_array($org_file)) { $file = $org_file[0]; } $bork_info = "Redirected test did not contain redirection info"; - show_result("BORK", $bork_info, '', '', $temp_filenames); + show_result("BORK", 
$bork_info, '', ''); $PHP_FAILED_TESTS['BORKED'][] = [ 'name' => $file, 'test_name' => '', @@ -2343,21 +2264,21 @@ function run_test(string $php, $file, array $env): string 'info' => "$bork_info [$file]", ]; - junit_mark_test_as('BORK', $shortname, $tested, null, $bork_info); + $junit->markTestAs('BORK', $shortname, $tested, null, $bork_info); return 'BORKED'; } // We've satisfied the preconditions - run the test! - if (isset($section_text['FILE'])) { - show_file_block('php', $section_text['FILE'], 'TEST'); - save_text($test_file, $section_text['FILE'], $temp_file); + if ($test->hasSection('FILE')) { + show_file_block('php', $test->getSection('FILE'), 'TEST'); + save_text($test_file, $test->getSection('FILE'), $temp_file); } else { - $test_file = $temp_file = ""; + $test_file = ""; } - if (array_key_exists('GET', $section_text)) { - $query_string = trim($section_text['GET']); + if ($test->hasSection('GET')) { + $query_string = trim($test->getSection('GET')); } else { $query_string = ''; } @@ -2373,13 +2294,13 @@ function run_test(string $php, $file, array $env): string $env['SCRIPT_FILENAME'] = $test_file; } - if (array_key_exists('COOKIE', $section_text)) { - $env['HTTP_COOKIE'] = trim($section_text['COOKIE']); + if ($test->hasSection('COOKIE')) { + $env['HTTP_COOKIE'] = trim($test->getSection('COOKIE')); } else { $env['HTTP_COOKIE'] = ''; } - $args = isset($section_text['ARGS']) ? ' -- ' . $section_text['ARGS'] : ''; + $args = $test->hasSection('ARGS') ? ' -- ' . $test->getSection('ARGS') : ''; if ($preload && !empty($test_file)) { save_text($preload_filename, "sectionNotEmpty('POST_RAW')) { + $post = trim($test->getSection('POST_RAW')); $raw_lines = explode("\n", $post); $request = ''; @@ -2411,17 +2332,19 @@ function run_test(string $php, $file, array $env): string } $env['CONTENT_LENGTH'] = strlen($request); - $env['REQUEST_METHOD'] = 'POST'; + if (empty($env['REQUEST_METHOD'])) { + $env['REQUEST_METHOD'] = 'POST'; + } if (empty($request)) { - junit_mark_test_as('BORK', $shortname, $tested, null, 'empty $request'); + $junit->markTestAs('BORK', $shortname, $tested, null, 'empty $request'); return 'BORKED'; } save_text($tmp_post, $request); $cmd = "$php $pass_options $ini_settings -f \"$test_file\"$cmdRedirect < \"$tmp_post\""; - } elseif (array_key_exists('PUT', $section_text) && !empty($section_text['PUT'])) { - $post = trim($section_text['PUT']); + } elseif ($test->sectionNotEmpty('PUT')) { + $post = trim($test->getSection('PUT')); $raw_lines = explode("\n", $post); $request = ''; @@ -2445,14 +2368,14 @@ function run_test(string $php, $file, array $env): string $env['REQUEST_METHOD'] = 'PUT'; if (empty($request)) { - junit_mark_test_as('BORK', $shortname, $tested, null, 'empty $request'); + $junit->markTestAs('BORK', $shortname, $tested, null, 'empty $request'); return 'BORKED'; } save_text($tmp_post, $request); $cmd = "$php $pass_options $ini_settings -f \"$test_file\"$cmdRedirect < \"$tmp_post\""; - } elseif (array_key_exists('POST', $section_text) && !empty($section_text['POST'])) { - $post = trim($section_text['POST']); + } elseif ($test->sectionNotEmpty('POST')) { + $post = trim($test->getSection('POST')); $content_length = strlen($post); save_text($tmp_post, $post); @@ -2466,8 +2389,8 @@ function run_test(string $php, $file, array $env): string } $cmd = "$php $pass_options $ini_settings -f \"$test_file\"$cmdRedirect < \"$tmp_post\""; - } elseif (array_key_exists('GZIP_POST', $section_text) && !empty($section_text['GZIP_POST'])) { - $post = trim($section_text['GZIP_POST']); + } 
elseif ($test->sectionNotEmpty('GZIP_POST')) { + $post = trim($test->getSection('GZIP_POST')); $post = gzencode($post, 9, FORCE_GZIP); $env['HTTP_CONTENT_ENCODING'] = 'gzip'; @@ -2479,8 +2402,8 @@ function run_test(string $php, $file, array $env): string $env['CONTENT_LENGTH'] = $content_length; $cmd = "$php $pass_options $ini_settings -f \"$test_file\"$cmdRedirect < \"$tmp_post\""; - } elseif (array_key_exists('DEFLATE_POST', $section_text) && !empty($section_text['DEFLATE_POST'])) { - $post = trim($section_text['DEFLATE_POST']); + } elseif ($test->sectionNotEmpty('DEFLATE_POST')) { + $post = trim($test->getSection('DEFLATE_POST')); $post = gzcompress($post, 9); $env['HTTP_CONTENT_ENCODING'] = 'deflate'; save_text($tmp_post, $post); @@ -2496,9 +2419,11 @@ function run_test(string $php, $file, array $env): string $env['CONTENT_TYPE'] = ''; $env['CONTENT_LENGTH'] = ''; - $cmd = "$php $pass_options $ini_settings -f \"$test_file\" $args$cmdRedirect"; + $repeat_option = $num_repeats > 1 ? "--repeat $num_repeats" : ""; + $cmd = "$php $pass_options $repeat_option $ini_settings -f \"$test_file\" $args$cmdRedirect"; } + $orig_cmd = $cmd; if ($valgrind) { $env['USE_ZEND_ALLOC'] = '0'; $env['ZEND_DONT_UNLOAD_MODULES'] = 1; @@ -2506,6 +2431,16 @@ function run_test(string $php, $file, array $env): string $cmd = $valgrind->wrapCommand($cmd, $memcheck_filename, strpos($test_file, "pcre") !== false); } + if ($test->hasSection('XLEAK')) { + $env['ZEND_ALLOC_PRINT_LEAKS'] = '0'; + if (isset($env['SKIP_ASAN'])) { + // $env['LSAN_OPTIONS'] = 'detect_leaks=0'; + /* For unknown reasons, LSAN_OPTIONS=detect_leaks=0 would occasionally not be picked up + * in CI. Skip the test with ASAN, as it's not worth investegating. */ + return skip_test($tested, $tested_file, $shortname, 'xleak does not work with asan'); + } + } + if ($DETAILED) { echo " CONTENT_LENGTH = " . $env['CONTENT_LENGTH'] . " @@ -2520,44 +2455,43 @@ function run_test(string $php, $file, array $env): string "; } - junit_start_timer($shortname); + $junit->startTimer($shortname); $hrtime = hrtime(); $startTime = $hrtime[0] * 1000000000 + $hrtime[1]; - $out = system_with_timeout($cmd, $env, $section_text['STDIN'] ?? null, $captureStdIn, $captureStdOut, $captureStdErr); + $stdin = $test->hasSection('STDIN') ? $test->getSection('STDIN') : null; + $out = system_with_timeout($cmd, $env, $stdin, $captureStdIn, $captureStdOut, $captureStdErr); - junit_finish_timer($shortname); + $junit->stopTimer($shortname); $hrtime = hrtime(); $time = $hrtime[0] * 1000000000 + $hrtime[1] - $startTime; if ($time >= $slow_min_ms * 1000000) { $PHP_FAILED_TESTS['SLOW'][] = [ 'name' => $file, - 'test_name' => (is_array($IN_REDIRECT) ? $IN_REDIRECT['via'] : '') . $tested . " [$tested_file]", + 'test_name' => $tested . " [$tested_file]", 'output' => '', 'diff' => '', 'info' => $time / 1000000000, ]; } - if (array_key_exists('CLEAN', $section_text) && (!$no_clean || $cfg['keep']['clean'])) { - if (trim($section_text['CLEAN'])) { - show_file_block('clean', $section_text['CLEAN']); - save_text($test_clean, trim($section_text['CLEAN']), $temp_clean); + // Remember CLEAN output to report borked test if it otherwise passes. + $clean_output = null; + if ((!$no_clean || $cfg['keep']['clean']) && $test->sectionNotEmpty('CLEAN')) { + show_file_block('clean', $test->getSection('CLEAN')); + save_text($test_clean, trim($test->getSection('CLEAN')), $temp_clean); - if (!$no_clean) { - $extra = !IS_WINDOWS ? 
- "unset REQUEST_METHOD; unset QUERY_STRING; unset PATH_TRANSLATED; unset SCRIPT_FILENAME; unset REQUEST_METHOD;" : ""; - system_with_timeout("$extra $php $pass_options $extra_options -q $orig_ini_settings $no_file_cache \"$test_clean\"", $env); - } + if (!$no_clean) { + $extra = !IS_WINDOWS ? + "unset REQUEST_METHOD; unset QUERY_STRING; unset PATH_TRANSLATED; unset SCRIPT_FILENAME; unset REQUEST_METHOD;" : ""; + $clean_output = system_with_timeout("$extra $orig_php $pass_options -q $orig_ini_settings $no_file_cache \"$test_clean\"", $env); + } - if (!$cfg['keep']['clean']) { - @unlink($test_clean); - } + if (!$cfg['keep']['clean']) { + @unlink($test_clean); } } - @unlink($preload_filename); - $leaked = false; $passed = false; @@ -2569,13 +2503,32 @@ function run_test(string $php, $file, array $env): string } } + if ($num_repeats > 1) { + // In repeat mode, retain the output before the first execution, + // and of the last execution. Do this early, because the trimming below + // makes the newline handling complicated. + $separator1 = "Executing for the first time...\n"; + $separator1_pos = strpos($out, $separator1); + if ($separator1_pos !== false) { + $separator2 = "Finished execution, repeating...\n"; + $separator2_pos = strrpos($out, $separator2); + if ($separator2_pos !== false) { + $out = substr($out, 0, $separator1_pos) + . substr($out, $separator2_pos + strlen($separator2)); + } else { + $out = substr($out, 0, $separator1_pos) + . substr($out, $separator1_pos + strlen($separator1)); + } + } + } + // Does the output match what is expected? $output = preg_replace("/\r\n/", "\n", trim($out)); /* when using CGI, strip the headers from the output */ $headers = []; - if (!empty($uses_cgi) && preg_match("/^(.*?)\r?\n\r?\n(.*)/s", $out, $match)) { + if ($uses_cgi && preg_match("/^(.*?)\r?\n\r?\n(.*)/s", $out, $match)) { $output = trim($match[2]); $rh = preg_split("/[\n\r]+/", $match[1]); @@ -2587,12 +2540,14 @@ function run_test(string $php, $file, array $env): string } } + $wanted_headers = null; + $output_headers = null; $failed_headers = false; - if (isset($section_text['EXPECTHEADERS'])) { + if ($test->hasSection('EXPECTHEADERS')) { $want = []; $wanted_headers = []; - $lines = preg_split("/[\n\r]+/", $section_text['EXPECTHEADERS']); + $lines = preg_split("/[\n\r]+/", $test->getSection('EXPECTHEADERS')); foreach ($lines as $line) { if (strpos($line, ':') !== false) { @@ -2614,9 +2569,7 @@ function run_test(string $php, $file, array $env): string } } - ksort($wanted_headers); $wanted_headers = implode("\n", $wanted_headers); - ksort($output_headers); $output_headers = implode("\n", $output_headers); } @@ -2626,111 +2579,79 @@ function run_test(string $php, $file, array $env): string $output = trim(preg_replace("/\n?Warning: Can't preload [^\n]*\n?/", "", $output)); } - if (isset($section_text['EXPECTF']) || isset($section_text['EXPECTREGEX'])) { - if (isset($section_text['EXPECTF'])) { - $wanted = trim($section_text['EXPECTF']); + if ($test->hasAnySections('EXPECTF', 'EXPECTREGEX')) { + if ($test->hasSection('EXPECTF')) { + $wanted = trim($test->getSection('EXPECTF')); } else { - $wanted = trim($section_text['EXPECTREGEX']); + $wanted = trim($test->getSection('EXPECTREGEX')); } show_file_block('exp', $wanted); $wanted_re = preg_replace('/\r\n/', "\n", $wanted); - if (isset($section_text['EXPECTF'])) { - // do preg_quote, but miss out any %r delimited sections - $temp = ""; - $r = "%r"; - $startOffset = 0; - $length = strlen($wanted_re); - while ($startOffset < $length) { - $start = 
strpos($wanted_re, $r, $startOffset); - if ($start !== false) { - // we have found a start tag - $end = strpos($wanted_re, $r, $start + 2); - if ($end === false) { - // unbalanced tag, ignore it. - $end = $start = $length; - } - } else { - // no more %r sections - $start = $end = $length; - } - // quote a non re portion of the string - $temp .= preg_quote(substr($wanted_re, $startOffset, $start - $startOffset), '/'); - // add the re unquoted. - if ($end > $start) { - $temp .= '(' . substr($wanted_re, $start + 2, $end - $start - 2) . ')'; - } - $startOffset = $end + 2; - } - $wanted_re = $temp; - - // Stick to basics - $wanted_re = str_replace('%e', '\\' . DIRECTORY_SEPARATOR, $wanted_re); - $wanted_re = str_replace('%s', '[^\r\n]+', $wanted_re); - $wanted_re = str_replace('%S', '[^\r\n]*', $wanted_re); - $wanted_re = str_replace('%a', '.+', $wanted_re); - $wanted_re = str_replace('%A', '.*', $wanted_re); - $wanted_re = str_replace('%w', '\s*', $wanted_re); - $wanted_re = str_replace('%i', '[+-]?\d+', $wanted_re); - $wanted_re = str_replace('%d', '\d+', $wanted_re); - $wanted_re = str_replace('%x', '[0-9a-fA-F]+', $wanted_re); - $wanted_re = str_replace('%f', '[+-]?\.?\d+\.?\d*(?:[Ee][+-]?\d+)?', $wanted_re); - $wanted_re = str_replace('%c', '.', $wanted_re); - // %f allows two points "-.0.0" but that is the best *simple* expression - } - - if (preg_match("/^$wanted_re\$/s", $output)) { + if ($test->hasSection('EXPECTF')) { + $wanted_re = expectf_to_regex($wanted_re); + } + + if (preg_match('/^' . $wanted_re . '$/s', $output)) { $passed = true; - if (!$cfg['keep']['php']) { - @unlink($test_file); - } - @unlink($tmp_post); - - if (!$leaked && !$failed_headers) { - if (isset($section_text['XFAIL'])) { - $warn = true; - $info = " (warn: XFAIL section but test passes)"; - } elseif (isset($section_text['XLEAK'])) { - $warn = true; - $info = " (warn: XLEAK section but test passes)"; - } else { - show_result("PASS", $tested, $tested_file, '', $temp_filenames); - junit_mark_test_as('PASS', $shortname, $tested); - return 'PASSED'; - } - } } } else { - $wanted = trim($section_text['EXPECT']); + $wanted = trim($test->getSection('EXPECT')); $wanted = preg_replace('/\r\n/', "\n", $wanted); show_file_block('exp', $wanted); // compare and leave on success if (!strcmp($output, $wanted)) { $passed = true; + } + + $wanted_re = null; + } + if (!$passed && !$retried && error_may_be_retried($test, $output)) { + $retried = true; + goto retry; + } + + if ($passed) { + if (!$cfg['keep']['php'] && !$leaked) { + @unlink($test_file); + @unlink($preload_filename); + } + @unlink($tmp_post); + + if (!$leaked && !$failed_headers) { + // If the test passed and CLEAN produced output, report test as borked. 
+ if ($clean_output) { + show_result("BORK", $output, $tested_file, 'reason: invalid output from CLEAN'); + $PHP_FAILED_TESTS['BORKED'][] = [ + 'name' => $file, + 'test_name' => '', + 'output' => '', + 'diff' => '', + 'info' => "$clean_output [$file]", + ]; - if (!$cfg['keep']['php']) { - @unlink($test_file); + $junit->markTestAs('BORK', $shortname, $tested, null, $clean_output); + return 'BORKED'; } - @unlink($tmp_post); - if (!$leaked && !$failed_headers) { - if (isset($section_text['XFAIL'])) { - $warn = true; - $info = " (warn: XFAIL section but test passes)"; - } elseif (isset($section_text['XLEAK'])) { - $warn = true; - $info = " (warn: XLEAK section but test passes)"; - } else { - show_result("PASS", $tested, $tested_file, '', $temp_filenames); - junit_mark_test_as('PASS', $shortname, $tested); - return 'PASSED'; - } + if ($test->hasSection('XFAIL')) { + $warn = true; + $info = " (warn: XFAIL section but test passes)"; + } elseif ($test->hasSection('XLEAK') && $valgrind) { + // XLEAK with ASAN completely disables LSAN so the test is expected to pass + $warn = true; + $info = " (warn: XLEAK section but test passes)"; + } elseif ($retried) { + $warn = true; + $info = " (warn: Test passed on retry attempt)"; + } else { + show_result("PASS", $tested, $tested_file, ''); + $junit->markTestAs('PASS', $shortname, $tested); + return 'PASSED'; } } - - $wanted_re = null; } // Test failed so we need to report details. @@ -2744,8 +2665,10 @@ function run_test(string $php, $file, array $env): string } } + $restype = []; + if ($leaked) { - $restype[] = isset($section_text['XLEAK']) ? + $restype[] = $test->hasSection('XLEAK') ? 'XLEAK' : 'LEAK'; } @@ -2754,12 +2677,13 @@ function run_test(string $php, $file, array $env): string } if (!$passed) { - if (isset($section_text['XFAIL'])) { + if ($test->hasSection('XFAIL')) { $restype[] = 'XFAIL'; - $info = ' XFAIL REASON: ' . rtrim($section_text['XFAIL']); - } elseif (isset($section_text['XLEAK'])) { + $info = ' XFAIL REASON: ' . rtrim($test->getSection('XFAIL')); + } elseif ($test->hasSection('XLEAK') && $valgrind) { + // XLEAK with ASAN completely disables LSAN so the test is expected to pass $restype[] = 'XLEAK'; - $info = ' XLEAK REASON: ' . rtrim($section_text['XLEAK']); + $info = ' XLEAK REASON: ' . rtrim($test->getSection('XLEAK')); } else { $restype[] = 'FAIL'; } @@ -2777,38 +2701,78 @@ function run_test(string $php, $file, array $env): string } // write .diff - $diff = generate_diff($wanted, $wanted_re, $output); + if (!empty($environment['TEST_PHP_DIFF_CMD'])) { + $diff = generate_diff_external($environment['TEST_PHP_DIFF_CMD'], $exp_filename, $output_filename); + } else { + $diff = generate_diff($wanted, $wanted_re, $output); + } + + // write .stdin + if ($test->hasSection('STDIN') || $test->hasSection('PHPDBG')) { + $stdin = $test->hasSection('STDIN') + ? $test->getSection('STDIN') + : $test->getSection('PHPDBG') . "\n"; + if (file_put_contents($stdin_filename, $stdin) === false) { + error("Cannot create test stdin - $stdin_filename"); + } + } + if (is_array($IN_REDIRECT)) { $orig_shortname = str_replace(TEST_PHP_SRCDIR . '/', '', $file); $diff = "# original source file: $orig_shortname\n" . 
$diff; } - show_file_block('diff', $diff); + if (!$SHOW_ONLY_GROUPS || array_intersect($restype, $SHOW_ONLY_GROUPS)) { + show_file_block('diff', $diff); + } if (strpos($log_format, 'D') !== false && file_put_contents($diff_filename, $diff) === false) { error("Cannot create test diff - $diff_filename"); } + // write .log + if (strpos($log_format, 'L') !== false && file_put_contents($log_filename, " +---- EXPECTED OUTPUT +$wanted +---- ACTUAL OUTPUT +$output +---- FAILED +") === false) { + error("Cannot create test log - $log_filename"); + error_report($file, $log_filename, $tested); + } + } + + if (!$passed || $leaked) { // write .sh if (strpos($log_format, 'S') !== false) { - $env_lines = []; + // Unset all environment variables so that we don't inherit extra + // ones from the parent process. + $env_lines = ['unset $(env | cut -d= -f1)']; foreach ($env as $env_var => $env_val) { + if (strval($env_val) === '') { + // proc_open does not pass empty env vars + continue; + } $env_lines[] = "export $env_var=" . escapeshellarg($env_val ?? ""); } - $exported_environment = $env_lines ? "\n" . implode("\n", $env_lines) . "\n" : ""; + $exported_environment = "\n" . implode("\n", $env_lines) . "\n"; $sh_script = <<', $diff); - junit_mark_test_as($restype, $shortname, $tested, null, $info, $diff); + $junit->markTestAs($restype, $shortname, $tested, null, $info, $diff); return $restype[0] . 'ED'; } -/** - * @return bool|int - */ -function comp_line(string $l1, string $l2, bool $is_reg) +function is_flaky(TestFile $test): bool { - if ($is_reg) { - return preg_match('/^' . $l1 . '$/s', $l2); - } else { - return !strcmp($l1, $l2); - } -} - -function count_array_diff( - array $ar1, - array $ar2, - bool $is_reg, - array $w, - int $idx1, - int $idx2, - int $cnt1, - int $cnt2, - int $steps -): int { - $equal = 0; - - while ($idx1 < $cnt1 && $idx2 < $cnt2 && comp_line($ar1[$idx1], $ar2[$idx2], $is_reg)) { - $idx1++; - $idx2++; - $equal++; - $steps--; + if ($test->hasSection('FLAKY')) { + return true; } - if (--$steps > 0) { - $eq1 = 0; - $st = $steps / 2; - - for ($ofs1 = $idx1 + 1; $ofs1 < $cnt1 && $st-- > 0; $ofs1++) { - $eq = @count_array_diff($ar1, $ar2, $is_reg, $w, $ofs1, $idx2, $cnt1, $cnt2, $st); - - if ($eq > $eq1) { - $eq1 = $eq; - } - } - - $eq2 = 0; - $st = $steps; - - for ($ofs2 = $idx2 + 1; $ofs2 < $cnt2 && $st-- > 0; $ofs2++) { - $eq = @count_array_diff($ar1, $ar2, $is_reg, $w, $idx1, $ofs2, $cnt1, $cnt2, $st); - if ($eq > $eq2) { - $eq2 = $eq; - } - } - - if ($eq1 > $eq2) { - $equal += $eq1; - } elseif ($eq2 > 0) { - $equal += $eq2; + if ($test->hasSection('SKIPIF')) { + if (strpos($test->getSection('SKIPIF'), 'SKIP_PERF_SENSITIVE') !== false) { + return true; } } + if (!$test->hasSection('FILE')) { + return false; + } + $file = $test->getSection('FILE'); + $flaky_functions = [ + 'disk_free_space', + 'hrtime', + 'microtime', + 'sleep', + 'usleep', + ]; + $regex = '(\b(' . implode('|', $flaky_functions) . ')\()i'; + return preg_match($regex, $file) === 1; +} - return $equal; +function is_flaky_output(string $output): bool +{ + $messages = [ + '404: page not found', + 'address already in use', + 'connection refused', + 'deadlock', + 'mailbox already exists', + 'timed out', + ]; + $regex = '(\b(' . implode('|', $messages) . 
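// Illustrative sketch (hypothetical call, not part of the patch): is_flaky() above and
// is_flaky_output() here feed error_may_be_retried() just below, which run_test() checks
// so that a failing test is re-run at most once ($retried guards the goto retry).
// For instance, a test whose FILE section calls usleep(), or whose output contains
// "Connection refused", would qualify:
//   error_may_be_retried($test, "Connection refused"); // true, one retry is attempted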
')\b)i'; + return preg_match($regex, $output) === 1; } -function generate_array_diff(array $ar1, array $ar2, bool $is_reg, array $w): array +function error_may_be_retried(TestFile $test, string $output): bool { - global $context_line_count; - $idx1 = 0; - $cnt1 = @count($ar1); - $idx2 = 0; - $cnt2 = @count($ar2); - $diff = []; - $old1 = []; - $old2 = []; - $number_len = max(3, strlen((string)max($cnt1 + 1, $cnt2 + 1))); - $line_number_spec = '%0' . $number_len . 'd'; - - /** Mapping from $idx2 to $idx1, including indexes of idx2 that are identical to idx1 as well as entries that don't have matches */ - $mapping = []; - - while ($idx1 < $cnt1 && $idx2 < $cnt2) { - $mapping[$idx2] = $idx1; - if (comp_line($ar1[$idx1], $ar2[$idx2], $is_reg)) { - $idx1++; - $idx2++; - continue; - } else { - $c1 = @count_array_diff($ar1, $ar2, $is_reg, $w, $idx1 + 1, $idx2, $cnt1, $cnt2, 10); - $c2 = @count_array_diff($ar1, $ar2, $is_reg, $w, $idx1, $idx2 + 1, $cnt1, $cnt2, 10); + return is_flaky_output($output) + || is_flaky($test); +} - if ($c1 > $c2) { - $old1[$idx1] = sprintf("{$line_number_spec}- ", $idx1 + 1) . $w[$idx1++]; - } elseif ($c2 > 0) { - $old2[$idx2] = sprintf("{$line_number_spec}+ ", $idx2 + 1) . $ar2[$idx2++]; - } else { - $old1[$idx1] = sprintf("{$line_number_spec}- ", $idx1 + 1) . $w[$idx1++]; - $old2[$idx2] = sprintf("{$line_number_spec}+ ", $idx2 + 1) . $ar2[$idx2++]; +function expectf_to_regex(?string $wanted): string +{ + $wanted_re = $wanted ?? ''; + + $wanted_re = preg_replace('/\r\n/', "\n", $wanted_re); + + // do preg_quote, but miss out any %r delimited sections + $temp = ""; + $r = "%r"; + $startOffset = 0; + $length = strlen($wanted_re); + while ($startOffset < $length) { + $start = strpos($wanted_re, $r, $startOffset); + if ($start !== false) { + // we have found a start tag + $end = strpos($wanted_re, $r, $start + 2); + if ($end === false) { + // unbalanced tag, ignore it. + $end = $start = $length; } - $last_printed_context_line = $idx1; + } else { + // no more %r sections + $start = $end = $length; } - } - $mapping[$idx2] = $idx1; - - reset($old1); - $k1 = key($old1); - $l1 = -2; - reset($old2); - $k2 = key($old2); - $l2 = -2; - $old_k1 = -1; - $add_context_lines = function (int $new_k1) use (&$old_k1, &$diff, $w, $context_line_count, $number_len) { - if ($old_k1 >= $new_k1 || !$context_line_count) { - return; + // quote a non re portion of the string + $temp .= preg_quote(substr($wanted_re, $startOffset, $start - $startOffset), '/'); + // add the re unquoted. + if ($end > $start) { + $temp .= '(' . substr($wanted_re, $start + 2, $end - $start - 2) . ')'; } - $end = $new_k1 - 1; - $range_end = min($end, $old_k1 + $context_line_count); - if ($old_k1 >= 0) { - while ($old_k1 < $range_end) { - $diff[] = str_repeat(' ', $number_len + 2) . $w[$old_k1++]; - } - } - if ($end - $context_line_count > $old_k1) { - $old_k1 = $end - $context_line_count; - if ($old_k1 > 0) { - // Add a '--' to mark sections where the common areas were truncated - $diff[] = '--'; - } - } - $old_k1 = max($old_k1, 0); - while ($old_k1 < $end) { - $diff[] = str_repeat(' ', $number_len + 2) . $w[$old_k1++]; - } - $old_k1 = $new_k1; - }; - - while ($k1 !== null || $k2 !== null) { - if ($k1 == $l1 + 1 || $k2 === null) { - $add_context_lines($k1); - $l1 = $k1; - $diff[] = current($old1); - $old_k1 = $k1; - $k1 = next($old1) ? key($old1) : null; - } elseif ($k2 == $l2 + 1 || $k1 === null) { - $add_context_lines($mapping[$k2]); - $l2 = $k2; - $diff[] = current($old2); - $k2 = next($old2) ? 
key($old2) : null; - } elseif ($k1 < $mapping[$k2]) { - $add_context_lines($k1); - $l1 = $k1; - $diff[] = current($old1); - $k1 = next($old1) ? key($old1) : null; - } else { - $add_context_lines($mapping[$k2]); - $l2 = $k2; - $diff[] = current($old2); - $k2 = next($old2) ? key($old2) : null; + $startOffset = $end + 2; + } + $wanted_re = $temp; + + return strtr($wanted_re, [ + '%e' => preg_quote(DIRECTORY_SEPARATOR, '/'), + '%s' => '[^\r\n]+', + '%S' => '[^\r\n]*', + '%a' => '.+?', + '%A' => '.*?', + '%w' => '\s*', + '%i' => '[+-]?\d+', + '%d' => '\d+', + '%x' => '[0-9a-fA-F]+', + '%f' => '[+-]?(?:\d+|(?=\.\d))(?:\.\d+)?(?:[Ee][+-]?\d+)?', + '%c' => '.', + '%0' => '\x00', + ]); +} +/** + * Map "Zend OPcache" to "opcache" and convert all ext names to lowercase. + */ +function remap_loaded_extensions_names(array $names): array +{ + $exts = []; + foreach ($names as $name) { + if ($name === 'Core') { + continue; } + $exts[] = ['Zend OPcache' => 'opcache'][$name] ?? strtolower($name); } - while ($idx1 < $cnt1) { - $add_context_lines($idx1 + 1); - $diff[] = sprintf("{$line_number_spec}- ", $idx1 + 1) . $w[$idx1++]; - } + return $exts; +} - while ($idx2 < $cnt2) { - if (isset($mapping[$idx2])) { - $add_context_lines($mapping[$idx2] + 1); - } - $diff[] = sprintf("{$line_number_spec}+ ", $idx2 + 1) . $ar2[$idx2++]; - } - $add_context_lines(min($old_k1 + $context_line_count + 1, $cnt1 + 1)); - if ($context_line_count && $old_k1 < $cnt1 + 1) { - // Add a '--' to mark sections where the common areas were truncated - $diff[] = '--'; - } +function generate_diff_external(string $diff_cmd, string $exp_file, string $output_file): string +{ + $retval = shell_exec("{$diff_cmd} {$exp_file} {$output_file}"); - return $diff; + return is_string($retval) ? $retval : 'Could not run external diff tool set through TEST_PHP_DIFF_CMD environment variable'; } function generate_diff(string $wanted, ?string $wanted_re, string $output): string { $w = explode("\n", $wanted); $o = explode("\n", $output); - $r = is_null($wanted_re) ? $w : explode("\n", $wanted_re); - $diff = generate_array_diff($r, $o, !is_null($wanted_re), $w); + $is_regex = $wanted_re !== null; - return implode(PHP_EOL, $diff); + $differ = new Differ(function ($expected, $new) use ($is_regex) { + if (!$is_regex) { + return $expected === $new; + } + $regex = '/^' . expectf_to_regex($expected). '$/s'; + return preg_match($regex, $new); + }); + return $differ->diff($w, $o); } function error(string $message): void @@ -3049,7 +2945,7 @@ function error(string $message): void exit(1); } -function settings2array(array $settings, &$ini_settings): void +function settings2array(array $settings, array &$ini_settings): void { foreach ($settings as $setting) { if (strpos($setting, '=') !== false) { @@ -3104,7 +3000,7 @@ function compute_summary(): void global $n_total, $test_results, $ignored_by_ext, $sum_results, $percent_results; $n_total = count($test_results); - $n_total += $ignored_by_ext; + $n_total += count($ignored_by_ext); $sum_results = [ 'PASSED' => 0, 'WARNED' => 0, @@ -3120,7 +3016,7 @@ function compute_summary(): void $sum_results[$v]++; } - $sum_results['SKIPPED'] += $ignored_by_ext; + $sum_results['SKIPPED'] += count($ignored_by_ext); $percent_results = []; foreach ($sum_results as $v => $n) { @@ -3152,43 +3048,43 @@ function get_summary(bool $show_ext_summary): string ===================================================================== TEST RESULT SUMMARY --------------------------------------------------------------------- -Exts skipped : ' . 
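// Illustrative sketch (hypothetical values, not part of the patch): with the placeholder
// table in expectf_to_regex() earlier in this hunk, an EXPECTF line is first preg_quote()d
// (except %r...%r runs) and then expanded, e.g.
//   expectf_to_regex('string(%d) "%s"');  // => 'string\(\d+\) "[^\r\n]+"'
// generate_diff() then anchors each such pattern as '/^...$/s' when matching output lines.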
sprintf('%4d', $exts_skipped) . ' -Exts tested : ' . sprintf('%4d', $exts_tested) . ' +Exts skipped : ' . sprintf('%5d', count($exts_skipped)) . ($exts_skipped ? ' (' . implode(', ', $exts_skipped) . ')' : '') . ' +Exts tested : ' . sprintf('%5d', count($exts_tested)) . ' --------------------------------------------------------------------- '; } $summary .= ' -Number of tests : ' . sprintf('%4d', $n_total) . ' ' . sprintf('%8d', $x_total); +Number of tests : ' . sprintf('%5d', $n_total) . ' ' . sprintf('%8d', $x_total); if ($sum_results['BORKED']) { $summary .= ' -Tests borked : ' . sprintf('%4d (%5.1f%%)', $sum_results['BORKED'], $percent_results['BORKED']) . ' --------'; +Tests borked : ' . sprintf('%5d (%5.1f%%)', $sum_results['BORKED'], $percent_results['BORKED']) . ' --------'; } $summary .= ' -Tests skipped : ' . sprintf('%4d (%5.1f%%)', $sum_results['SKIPPED'], $percent_results['SKIPPED']) . ' -------- -Tests warned : ' . sprintf('%4d (%5.1f%%)', $sum_results['WARNED'], $percent_results['WARNED']) . ' ' . sprintf('(%5.1f%%)', $x_warned) . ' -Tests failed : ' . sprintf('%4d (%5.1f%%)', $sum_results['FAILED'], $percent_results['FAILED']) . ' ' . sprintf('(%5.1f%%)', $x_failed); +Tests skipped : ' . sprintf('%5d (%5.1f%%)', $sum_results['SKIPPED'], $percent_results['SKIPPED']) . ' -------- +Tests warned : ' . sprintf('%5d (%5.1f%%)', $sum_results['WARNED'], $percent_results['WARNED']) . ' ' . sprintf('(%5.1f%%)', $x_warned) . ' +Tests failed : ' . sprintf('%5d (%5.1f%%)', $sum_results['FAILED'], $percent_results['FAILED']) . ' ' . sprintf('(%5.1f%%)', $x_failed); if ($sum_results['XFAILED']) { $summary .= ' -Expected fail : ' . sprintf('%4d (%5.1f%%)', $sum_results['XFAILED'], $percent_results['XFAILED']) . ' ' . sprintf('(%5.1f%%)', $x_xfailed); +Expected fail : ' . sprintf('%5d (%5.1f%%)', $sum_results['XFAILED'], $percent_results['XFAILED']) . ' ' . sprintf('(%5.1f%%)', $x_xfailed); } if ($valgrind) { $summary .= ' -Tests leaked : ' . sprintf('%4d (%5.1f%%)', $sum_results['LEAKED'], $percent_results['LEAKED']) . ' ' . sprintf('(%5.1f%%)', $x_leaked); +Tests leaked : ' . sprintf('%5d (%5.1f%%)', $sum_results['LEAKED'], $percent_results['LEAKED']) . ' ' . sprintf('(%5.1f%%)', $x_leaked); if ($sum_results['XLEAKED']) { $summary .= ' -Expected leak : ' . sprintf('%4d (%5.1f%%)', $sum_results['XLEAKED'], $percent_results['XLEAKED']) . ' ' . sprintf('(%5.1f%%)', $x_xleaked); +Expected leak : ' . sprintf('%5d (%5.1f%%)', $sum_results['XLEAKED'], $percent_results['XLEAKED']) . ' ' . sprintf('(%5.1f%%)', $x_xleaked); } } $summary .= ' -Tests passed : ' . sprintf('%4d (%5.1f%%)', $sum_results['PASSED'], $percent_results['PASSED']) . ' ' . sprintf('(%5.1f%%)', $x_passed) . ' +Tests passed : ' . sprintf('%5d (%5.1f%%)', $sum_results['PASSED'], $percent_results['PASSED']) . ' ' . sprintf('(%5.1f%%)', $x_passed) . ' --------------------------------------------------------------------- -Time taken : ' . sprintf('%4d seconds', $end_time - $start_time) . ' +Time taken : ' . sprintf('%5.3f seconds', ($end_time - $start_time) / 1e9) . 
' ===================================================================== '; $failed_test_summary = ''; @@ -3209,18 +3105,6 @@ function get_summary(bool $show_ext_summary): string $failed_test_summary .= "=====================================================================\n"; } - if (count($PHP_FAILED_TESTS['XFAILED'])) { - $failed_test_summary .= ' -===================================================================== -EXPECTED FAILED TEST SUMMARY ---------------------------------------------------------------------- -'; - foreach ($PHP_FAILED_TESTS['XFAILED'] as $failed_test_data) { - $failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . "\n"; - } - $failed_test_summary .= "=====================================================================\n"; - } - if (count($PHP_FAILED_TESTS['BORKED'])) { $failed_test_summary .= ' ===================================================================== @@ -3271,19 +3155,6 @@ function get_summary(bool $show_ext_summary): string $failed_test_summary .= "=====================================================================\n"; } - if (count($PHP_FAILED_TESTS['XLEAKED'])) { - $failed_test_summary .= ' -===================================================================== -EXPECTED LEAK TEST SUMMARY ---------------------------------------------------------------------- -'; - foreach ($PHP_FAILED_TESTS['XLEAKED'] as $failed_test_data) { - $failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . "\n"; - } - - $failed_test_summary .= "=====================================================================\n"; - } - if ($failed_test_summary && !getenv('NO_PHPTEST_SUMMARY')) { $summary .= $failed_test_summary; } @@ -3291,14 +3162,14 @@ function get_summary(bool $show_ext_summary): string return $summary; } -function show_start($start_time): void +function show_start(int $start_timestamp): void { - echo "TIME START " . date('Y-m-d H:i:s', $start_time) . "\n=====================================================================\n"; + echo "TIME START " . date('Y-m-d H:i:s', $start_timestamp) . "\n=====================================================================\n"; } -function show_end($end_time): void +function show_end(int $start_timestamp, int|float $start_time, int|float $end_time): void { - echo "=====================================================================\nTIME END " . date('Y-m-d H:i:s', $end_time) . "\n"; + echo "=====================================================================\nTIME END " . date('Y-m-d H:i:s', $start_timestamp + (int)(($end_time - $start_time)/1e9)) . 
"\n"; } function show_summary(): void @@ -3308,22 +3179,22 @@ function show_summary(): void function show_redirect_start(string $tests, string $tested, string $tested_file): void { - global $SHOW_ONLY_GROUPS; + global $SHOW_ONLY_GROUPS, $show_progress; if (!$SHOW_ONLY_GROUPS || in_array('REDIRECT', $SHOW_ONLY_GROUPS)) { echo "REDIRECT $tests ($tested [$tested_file]) begin\n"; - } else { + } elseif ($show_progress) { clear_show_test(); } } function show_redirect_ends(string $tests, string $tested, string $tested_file): void { - global $SHOW_ONLY_GROUPS; + global $SHOW_ONLY_GROUPS, $show_progress; if (!$SHOW_ONLY_GROUPS || in_array('REDIRECT', $SHOW_ONLY_GROUPS)) { echo "REDIRECT $tests ($tested [$tested_file]) done\n"; - } else { + } elseif ($show_progress) { clear_show_test(); } } @@ -3345,7 +3216,7 @@ function clear_show_test(): void // Parallel testing global $workerID; - if (!$workerID) { + if (!$workerID && isset($line_length)) { // Write over the last line to avoid random trailing chars on next echo echo str_repeat(" ", $line_length), "\r"; } @@ -3362,10 +3233,9 @@ function show_result( string $result, string $tested, string $tested_file, - string $extra = '', - ?array $temp_filenames = null + string $extra = '' ): void { - global $SHOW_ONLY_GROUPS, $colorize; + global $SHOW_ONLY_GROUPS, $colorize, $show_progress; if (!$SHOW_ONLY_GROUPS || in_array($result, $SHOW_ONLY_GROUPS)) { if ($colorize) { @@ -3387,324 +3257,378 @@ function show_result( } else { echo "$result $tested [$tested_file] $extra\n"; } - } elseif (!$SHOW_ONLY_GROUPS) { + } elseif ($show_progress) { clear_show_test(); } +} +class BorkageException extends Exception +{ } -function junit_init(): void +class JUnit { - // Check whether a junit log is wanted. - global $workerID; - $JUNIT = getenv('TEST_PHP_JUNIT'); - if (empty($JUNIT)) { - $GLOBALS['JUNIT'] = false; - return; - } - if ($workerID) { - $fp = null; - } elseif (!$fp = fopen($JUNIT, 'w')) { - error("Failed to open $JUNIT for writing."); - } - $GLOBALS['JUNIT'] = [ - 'fp' => $fp, - 'name' => 'PHP', + private bool $enabled = true; + private $fp = null; + private array $suites = []; + private array $rootSuite = self::EMPTY_SUITE + ['name' => 'php']; + + private const EMPTY_SUITE = [ 'test_total' => 0, 'test_pass' => 0, 'test_fail' => 0, 'test_error' => 0, 'test_skip' => 0, 'test_warn' => 0, + 'files' => [], 'execution_time' => 0, - 'suites' => [], - 'files' => [] ]; -} -function junit_save_xml(): void -{ - global $JUNIT; - if (!junit_enabled()) { - return; + /** + * @throws Exception + */ + public function __construct(array $env, int $workerID) + { + // Check whether a junit log is wanted. + $fileName = $env['TEST_PHP_JUNIT'] ?? null; + if (empty($fileName)) { + $this->enabled = false; + return; + } + if (!$workerID && !$this->fp = fopen($fileName, 'w')) { + throw new Exception("Failed to open $fileName for writing."); + } } - $xml = '<' . '?' . 'xml version="1.0" encoding="UTF-8"' . '?' . '>' . PHP_EOL; - $xml .= sprintf( - '' . PHP_EOL, - $JUNIT['name'], - $JUNIT['test_total'], - $JUNIT['test_fail'], - $JUNIT['test_error'], - $JUNIT['test_skip'], - $JUNIT['execution_time'] - ); - $xml .= junit_get_suite_xml(); - $xml .= ''; - fwrite($JUNIT['fp'], $xml); -} + public function isEnabled(): bool + { + return $this->enabled; + } -function junit_get_suite_xml(string $suite_name = ''): string -{ - global $JUNIT; - - $result = ""; - - foreach ($JUNIT['suites'] as $suite_name => $suite) { - $result .= sprintf( - '' . 
PHP_EOL, - $suite['name'], - $suite['test_total'], - $suite['test_fail'], - $suite['test_error'], - $suite['test_skip'], - $suite['execution_time'] - ); + public function clear(): void + { + $this->rootSuite = self::EMPTY_SUITE + ['name' => 'php']; + $this->suites = []; + } - if (!empty($suite_name)) { - foreach ($suite['files'] as $file) { - $result .= $JUNIT['files'][$file]['xml']; - } + public function saveXML(): void + { + if (!$this->enabled) { + return; } - $result .= '' . PHP_EOL; + $xml = '<' . '?' . 'xml version="1.0" encoding="UTF-8"' . '?' . '>' . PHP_EOL; + $xml .= sprintf( + '' . PHP_EOL, + $this->rootSuite['name'], + $this->rootSuite['test_total'], + $this->rootSuite['test_fail'], + $this->rootSuite['test_error'], + $this->rootSuite['test_skip'], + $this->rootSuite['execution_time'] + ); + $xml .= $this->getSuitesXML(); + $xml .= ''; + fwrite($this->fp, $xml); } - return $result; -} + private function getSuitesXML(): string + { + $result = ''; + + foreach ($this->suites as $suite_name => $suite) { + $result .= sprintf( + '' . PHP_EOL, + $suite['name'], + $suite['test_total'], + $suite['test_fail'], + $suite['test_error'], + $suite['test_skip'], + $suite['execution_time'] + ); + + if (!empty($suite_name)) { + foreach ($suite['files'] as $file) { + $result .= $this->rootSuite['files'][$file]['xml']; + } + } -function junit_enabled(): bool -{ - global $JUNIT; - return !empty($JUNIT); -} + $result .= '' . PHP_EOL; + } -/** - * @param array|string $type - */ -function junit_mark_test_as( - $type, - string $file_name, - string $test_name, - ?float $time = null, - string $message = '', - string $details = '' -): void { - global $JUNIT; - if (!junit_enabled()) { - return; + return $result; } - $suite = junit_get_suitename_for($file_name); - - junit_suite_record($suite, 'test_total'); + public function markTestAs( + $type, + string $file_name, + string $test_name, + ?int $time = null, + string $message = '', + string $details = '' + ): void { + if (!$this->enabled) { + return; + } - $time = $time ?? junit_get_timer($file_name); - junit_suite_record($suite, 'execution_time', $time); + $suite = $this->getSuiteName($file_name); - $escaped_details = htmlspecialchars($details, ENT_QUOTES, 'UTF-8'); - $escaped_details = preg_replace_callback('/[\0-\x08\x0B\x0C\x0E-\x1F]/', function (array $c): string { - return sprintf('[[0x%02x]]', ord($c[0])); - }, $escaped_details); - $escaped_message = htmlspecialchars($message, ENT_QUOTES, 'UTF-8'); + $this->record($suite, 'test_total'); - $escaped_test_name = htmlspecialchars($file_name . ' (' . $test_name . ')', ENT_QUOTES); - $JUNIT['files'][$file_name]['xml'] = "\n"; + $time = $time ?? $this->getTimer($file_name); + $this->record($suite, 'execution_time', $time); - if (is_array($type)) { - $output_type = $type[0] . 'ED'; - $temp = array_intersect(['XFAIL', 'XLEAK', 'FAIL', 'WARN'], $type); - $type = reset($temp); - } else { - $output_type = $type . 
'ED'; - } - - if ('PASS' == $type || 'XFAIL' == $type || 'XLEAK' == $type) { - junit_suite_record($suite, 'test_pass'); - } elseif ('BORK' == $type) { - junit_suite_record($suite, 'test_error'); - $JUNIT['files'][$file_name]['xml'] .= "\n"; - } elseif ('SKIP' == $type) { - junit_suite_record($suite, 'test_skip'); - $JUNIT['files'][$file_name]['xml'] .= "$escaped_message\n"; - } elseif ('WARN' == $type) { - junit_suite_record($suite, 'test_warn'); - $JUNIT['files'][$file_name]['xml'] .= "$escaped_message\n"; - } elseif ('FAIL' == $type) { - junit_suite_record($suite, 'test_fail'); - $JUNIT['files'][$file_name]['xml'] .= "$escaped_details\n"; - } else { - junit_suite_record($suite, 'test_error'); - $JUNIT['files'][$file_name]['xml'] .= "$escaped_details\n"; - } + $escaped_details = htmlspecialchars($details, ENT_QUOTES, 'UTF-8'); + $escaped_details = preg_replace_callback('/[\0-\x08\x0B\x0C\x0E-\x1F]/', function ($c) { + return sprintf('[[0x%02x]]', ord($c[0])); + }, $escaped_details); + $escaped_message = htmlspecialchars($message, ENT_QUOTES, 'UTF-8'); - $JUNIT['files'][$file_name]['xml'] .= "\n"; -} + $escaped_test_name = htmlspecialchars($file_name . ' (' . $test_name . ')', ENT_QUOTES); + $this->rootSuite['files'][$file_name]['xml'] = "\n"; -function junit_suite_record(string $suite, string $param, float $value = 1): void -{ - global $JUNIT; + if (is_array($type)) { + $output_type = $type[0] . 'ED'; + $temp = array_intersect(['XFAIL', 'XLEAK', 'FAIL', 'WARN'], $type); + $type = reset($temp); + } else { + $output_type = $type . 'ED'; + } - $JUNIT[$param] += $value; - $JUNIT['suites'][$suite][$param] += $value; -} + if ('PASS' == $type || 'XFAIL' == $type || 'XLEAK' == $type) { + $this->record($suite, 'test_pass'); + } elseif ('BORK' == $type) { + $this->record($suite, 'test_error'); + $this->rootSuite['files'][$file_name]['xml'] .= "\n"; + } elseif ('SKIP' == $type) { + $this->record($suite, 'test_skip'); + $this->rootSuite['files'][$file_name]['xml'] .= "$escaped_message\n"; + } elseif ('WARN' == $type) { + $this->record($suite, 'test_warn'); + $this->rootSuite['files'][$file_name]['xml'] .= "$escaped_message\n"; + } elseif ('FAIL' == $type) { + $this->record($suite, 'test_fail'); + $this->rootSuite['files'][$file_name]['xml'] .= "$escaped_details\n"; + } else { + $this->record($suite, 'test_error'); + $this->rootSuite['files'][$file_name]['xml'] .= "$escaped_details\n"; + } -function junit_get_timer(string $file_name): float -{ - global $JUNIT; - if (!junit_enabled()) { - return 0; + $this->rootSuite['files'][$file_name]['xml'] .= "\n"; } - if (isset($JUNIT['files'][$file_name]['total'])) { - return number_format($JUNIT['files'][$file_name]['total'], 4); + private function record(string $suite, string $param, $value = 1): void + { + $this->rootSuite[$param] += $value; + $this->suites[$suite][$param] += $value; } - return 0; -} + private function getTimer(string $file_name) + { + if (!$this->enabled) { + return 0; + } -function junit_start_timer(string $file_name): void -{ - global $JUNIT; - if (!junit_enabled()) { - return; + if (isset($this->rootSuite['files'][$file_name]['total'])) { + return number_format($this->rootSuite['files'][$file_name]['total'], 4); + } + + return 0; } - if (!isset($JUNIT['files'][$file_name]['start'])) { - $JUNIT['files'][$file_name]['start'] = microtime(true); + public function startTimer(string $file_name): void + { + if (!$this->enabled) { + return; + } + + if (!isset($this->rootSuite['files'][$file_name]['start'])) { + 
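// Illustrative sketch (hypothetical driver code, not part of the patch): the JUnit object
// replaces the former junit_* globals; typical use, assuming TEST_PHP_JUNIT names a
// writable file:
//   $junit = new JUnit(['TEST_PHP_JUNIT' => 'junit.xml'], 0);
//   $junit->startTimer($shortname);
//   // ... run the test ...
//   $junit->stopTimer($shortname);
//   $junit->markTestAs('PASS', $shortname, $tested);
//   $junit->saveXML();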
$this->rootSuite['files'][$file_name]['start'] = microtime(true); - $suite = junit_get_suitename_for($file_name); - junit_init_suite($suite); - $JUNIT['suites'][$suite]['files'][$file_name] = $file_name; + $suite = $this->getSuiteName($file_name); + $this->initSuite($suite); + $this->suites[$suite]['files'][$file_name] = $file_name; + } } -} -function junit_get_suitename_for(string $file_name): string -{ - return junit_path_to_classname(dirname($file_name)); -} + public function getSuiteName(string $file_name): string + { + return $this->pathToClassName(dirname($file_name)); + } -function junit_path_to_classname(string $file_name): string -{ - global $JUNIT; + private function pathToClassName(string $file_name): string + { + if (!$this->enabled) { + return ''; + } - if (!junit_enabled()) { - return ''; - } + $ret = $this->rootSuite['name']; + $_tmp = []; - $ret = $JUNIT['name']; - $_tmp = []; + // lookup whether we're in the PHP source checkout + $max = 5; + if (is_file($file_name)) { + $dir = dirname(realpath($file_name)); + } else { + $dir = realpath($file_name); + } + do { + array_unshift($_tmp, basename($dir)); + $chk = $dir . DIRECTORY_SEPARATOR . "main" . DIRECTORY_SEPARATOR . "php_version.h"; + $dir = dirname($dir); + } while (!file_exists($chk) && --$max > 0); + if (file_exists($chk)) { + if ($max) { + array_shift($_tmp); + } + foreach ($_tmp as $p) { + $ret .= "." . preg_replace(",[^a-z0-9]+,i", ".", $p); + } + return $ret; + } - // lookup whether we're in the PHP source checkout - $max = 5; - if (is_file($file_name)) { - $dir = dirname(realpath($file_name)); - } else { - $dir = realpath($file_name); + return $this->rootSuite['name'] . '.' . str_replace([DIRECTORY_SEPARATOR, '-'], '.', $file_name); } - do { - array_unshift($_tmp, basename($dir)); - $chk = $dir . DIRECTORY_SEPARATOR . "main" . DIRECTORY_SEPARATOR . "php_version.h"; - $dir = dirname($dir); - } while (!file_exists($chk) && --$max > 0); - if (file_exists($chk)) { - if ($max) { - array_shift($_tmp); + + public function initSuite(string $suite_name): void + { + if (!$this->enabled) { + return; } - foreach ($_tmp as $p) { - $ret .= "." . preg_replace(",[^a-z0-9]+,i", ".", $p); + + if (!empty($this->suites[$suite_name])) { + return; } - return $ret; + + $this->suites[$suite_name] = self::EMPTY_SUITE + ['name' => $suite_name]; } - return $JUNIT['name'] . '.' . 
str_replace([DIRECTORY_SEPARATOR, '-'], '.', $file_name); -} + /** + * @throws Exception + */ + public function stopTimer(string $file_name): void + { + if (!$this->enabled) { + return; + } -function junit_init_suite(string $suite_name): void -{ - global $JUNIT; - if (!junit_enabled()) { - return; + if (!isset($this->rootSuite['files'][$file_name]['start'])) { + throw new Exception("Timer for $file_name was not started!"); + } + + if (!isset($this->rootSuite['files'][$file_name]['total'])) { + $this->rootSuite['files'][$file_name]['total'] = 0; + } + + $start = $this->rootSuite['files'][$file_name]['start']; + $this->rootSuite['files'][$file_name]['total'] += microtime(true) - $start; + unset($this->rootSuite['files'][$file_name]['start']); } - if (!empty($JUNIT['suites'][$suite_name])) { - return; + public function mergeResults(?JUnit $other): void + { + if (!$this->enabled || !$other) { + return; + } + + $this->mergeSuites($this->rootSuite, $other->rootSuite); + foreach ($other->suites as $name => $suite) { + if (!isset($this->suites[$name])) { + $this->suites[$name] = $suite; + continue; + } + + $this->mergeSuites($this->suites[$name], $suite); + } } - $JUNIT['suites'][$suite_name] = [ - 'name' => $suite_name, - 'test_total' => 0, - 'test_pass' => 0, - 'test_fail' => 0, - 'test_error' => 0, - 'test_skip' => 0, - 'test_warn' => 0, - 'files' => [], - 'execution_time' => 0, - ]; + private function mergeSuites(array &$dest, array $source): void + { + $dest['test_total'] += $source['test_total']; + $dest['test_pass'] += $source['test_pass']; + $dest['test_fail'] += $source['test_fail']; + $dest['test_error'] += $source['test_error']; + $dest['test_skip'] += $source['test_skip']; + $dest['test_warn'] += $source['test_warn']; + $dest['execution_time'] += $source['execution_time']; + $dest['files'] += $source['files']; + } } -function junit_finish_timer(string $file_name): void +class SkipCache { - global $JUNIT; - if (!junit_enabled()) { - return; - } + private bool $enable; + private bool $keepFile; - if (!isset($JUNIT['files'][$file_name]['start'])) { - error("Timer for $file_name was not started!"); - } + private array $skips = []; + private array $extensions = []; + + private int $hits = 0; + private int $misses = 0; + private int $extHits = 0; + private int $extMisses = 0; - if (!isset($JUNIT['files'][$file_name]['total'])) { - $JUNIT['files'][$file_name]['total'] = 0; + public function __construct(bool $enable, bool $keepFile) + { + $this->enable = $enable; + $this->keepFile = $keepFile; } - $start = $JUNIT['files'][$file_name]['start']; - $JUNIT['files'][$file_name]['total'] += microtime(true) - $start; - unset($JUNIT['files'][$file_name]['start']); -} + public function checkSkip(string $php, string $code, string $checkFile, string $tempFile, array $env): string + { + // Extension tests frequently use something like $dir"; + + if (isset($this->skips[$key][$code])) { + $this->hits++; + if ($this->keepFile) { + save_text($checkFile, $code, $tempFile); + } + return $this->skips[$key][$code]; + } -function junit_merge_results(array $junit): void -{ - global $JUNIT; - $JUNIT['test_total'] += $junit['test_total']; - $JUNIT['test_pass'] += $junit['test_pass']; - $JUNIT['test_fail'] += $junit['test_fail']; - $JUNIT['test_error'] += $junit['test_error']; - $JUNIT['test_skip'] += $junit['test_skip']; - $JUNIT['test_warn'] += $junit['test_warn']; - $JUNIT['execution_time'] += $junit['execution_time']; - $JUNIT['files'] += $junit['files']; - foreach ($junit['suites'] as $name => $suite) { - if 
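// Illustrative sketch (hypothetical caller, not part of the patch): SkipCache memoises
// SKIPIF results and per-binary extension lists so repeated skip checks avoid spawning
// extra PHP processes; the constructor flags and variable names below are assumptions:
//   $skipCache = new SkipCache($enableCache, $keepSkipFiles);
//   $skip = $skipCache->checkSkip($php, $skipifCode, $checkFile, $tempFile, $env);
//   [$extDir, $loadedExts] = $skipCache->getExtensions($php);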
(!isset($JUNIT['suites'][$name])) { - $JUNIT['suites'][$name] = $suite; - continue; + save_text($checkFile, $code, $tempFile); + $result = trim(system_with_timeout("$php \"$checkFile\"", $env)); + if (strpos($result, 'nocache') === 0) { + $result = ''; + } else if ($this->enable) { + $this->skips[$key][$code] = $result; } + $this->misses++; + + if (!$this->keepFile) { + @unlink($checkFile); + } + + return $result; + } + + public function getExtensions(string $php): array + { + if (isset($this->extensions[$php])) { + $this->extHits++; + return $this->extensions[$php]; + } + + $extDir = shell_exec("$php -d display_errors=0 -r \"echo ini_get('extension_dir');\""); + $extensionsNames = explode(",", shell_exec("$php -d display_errors=0 -r \"echo implode(',', get_loaded_extensions());\"")); + $extensions = remap_loaded_extensions_names($extensionsNames); + + $result = [$extDir, $extensions]; + $this->extensions[$php] = $result; + $this->extMisses++; - $SUITE =& $JUNIT['suites'][$name]; - $SUITE['test_total'] += $suite['test_total']; - $SUITE['test_pass'] += $suite['test_pass']; - $SUITE['test_fail'] += $suite['test_fail']; - $SUITE['test_error'] += $suite['test_error']; - $SUITE['test_skip'] += $suite['test_skip']; - $SUITE['test_warn'] += $suite['test_warn']; - $SUITE['execution_time'] += $suite['execution_time']; - $SUITE['files'] += $suite['files']; + return $result; } } class RuntestsValgrind { - protected $version = ''; - protected $header = ''; - protected $version_3_3_0 = false; - protected $version_3_8_0 = false; - protected $tool = null; - - public function getVersion(): string - { - return $this->version; - } + protected string $header; + protected bool $version_3_8_0; + protected string $tool; public function getHeader(): string { @@ -3717,17 +3641,14 @@ public function __construct(array $environment, string $tool = 'memcheck') $header = system_with_timeout("valgrind --tool={$this->tool} --version", $environment); if (!$header) { error("Valgrind returned no version info for {$this->tool}, cannot proceed.\n". 
- "Please check if Valgrind is installed and the tool is named correctly."); + "Please check if Valgrind is installed and the tool is named correctly."); } $count = 0; $version = preg_replace("/valgrind-(\d+)\.(\d+)\.(\d+)([.\w_-]+)?(\s+)/", '$1.$2.$3', $header, 1, $count); if ($count != 1) { error("Valgrind returned invalid version info (\"{$header}\") for {$this->tool}, cannot proceed."); } - $this->version = $version; - $this->header = sprintf( - "%s (%s)", trim($header), $this->tool); - $this->version_3_3_0 = version_compare($version, '3.3.0', '>='); + $this->header = sprintf("%s (%s)", trim($header), $this->tool); $this->version_3_8_0 = version_compare($version, '3.8.0', '>='); } @@ -3740,12 +3661,208 @@ public function wrapCommand(string $cmd, string $memcheck_filename, bool $check_ /* --vex-iropt-register-updates=allregs-at-mem-access is necessary for phpdbg watchpoint tests */ if ($this->version_3_8_0) { - /* valgrind 3.3.0+ doesn't have --log-file-exactly option */ return "$vcmd --vex-iropt-register-updates=allregs-at-mem-access --log-file=$memcheck_filename $cmd"; - } elseif ($this->version_3_3_0) { - return "$vcmd --vex-iropt-precise-memory-exns=yes --log-file=$memcheck_filename $cmd"; + } + return "$vcmd --vex-iropt-precise-memory-exns=yes --log-file=$memcheck_filename $cmd"; + } +} + +class TestFile +{ + private string $fileName; + + private array $sections = ['TEST' => '']; + + private const ALLOWED_SECTIONS = [ + 'EXPECT', 'EXPECTF', 'EXPECTREGEX', 'EXPECTREGEX_EXTERNAL', 'EXPECT_EXTERNAL', 'EXPECTF_EXTERNAL', 'EXPECTHEADERS', + 'POST', 'POST_RAW', 'GZIP_POST', 'DEFLATE_POST', 'PUT', 'GET', 'COOKIE', 'ARGS', + 'FILE', 'FILEEOF', 'FILE_EXTERNAL', 'REDIRECTTEST', + 'CAPTURE_STDIO', 'STDIN', 'CGI', 'PHPDBG', + 'INI', 'ENV', 'EXTENSIONS', + 'SKIPIF', 'XFAIL', 'XLEAK', 'CLEAN', + 'CREDITS', 'DESCRIPTION', 'CONFLICTS', 'WHITESPACE_SENSITIVE', + 'FLAKY', + ]; + + /** + * @throws BorkageException + */ + public function __construct(string $fileName, bool $inRedirect) + { + $this->fileName = $fileName; + + $this->readFile(); + $this->validateAndProcess($inRedirect); + } + + public function hasSection(string $name): bool + { + return isset($this->sections[$name]); + } + + public function hasAnySections(string ...$names): bool + { + foreach ($names as $section) { + if (isset($this->sections[$section])) { + return true; + } + } + + return false; + } + + public function sectionNotEmpty(string $name): bool + { + return !empty($this->sections[$name]); + } + + /** + * @throws Exception + */ + public function getSection(string $name): string + { + if (!isset($this->sections[$name])) { + throw new Exception("Section $name not found"); + } + return $this->sections[$name]; + } + + public function getName(): string + { + return trim($this->getSection('TEST')); + } + + public function isCGI(): bool + { + return $this->hasSection('CGI') + || $this->sectionNotEmpty('GET') + || $this->sectionNotEmpty('POST') + || $this->sectionNotEmpty('GZIP_POST') + || $this->sectionNotEmpty('DEFLATE_POST') + || $this->sectionNotEmpty('POST_RAW') + || $this->sectionNotEmpty('PUT') + || $this->sectionNotEmpty('COOKIE') + || $this->sectionNotEmpty('EXPECTHEADERS'); + } + + /** + * TODO Refactor to make it not needed + */ + public function setSection(string $name, string $value): void + { + $this->sections[$name] = $value; + } + + /** + * Load the sections of the test file + * @throws BorkageException + */ + private function readFile(): void + { + $fp = fopen($this->fileName, "rb") or error("Cannot open test file: 
{$this->fileName}"); + + if (!feof($fp)) { + $line = fgets($fp); + + if ($line === false) { + throw new BorkageException("cannot read test"); + } } else { - return "$vcmd --vex-iropt-precise-memory-exns=yes --log-file-exactly=$memcheck_filename $cmd"; + throw new BorkageException("empty test [{$this->fileName}]"); + } + if (strncmp('--TEST--', $line, 8)) { + throw new BorkageException("tests must start with --TEST-- [{$this->fileName}]"); + } + + $section = 'TEST'; + $secfile = false; + $secdone = false; + + while (!feof($fp)) { + $line = fgets($fp); + + if ($line === false) { + break; + } + + // Match the beginning of a section. + if (preg_match('/^--([_A-Z]+)--/', $line, $r)) { + $section = $r[1]; + + if (isset($this->sections[$section]) && $this->sections[$section]) { + throw new BorkageException("duplicated $section section"); + } + + // check for unknown sections + if (!in_array($section, self::ALLOWED_SECTIONS)) { + throw new BorkageException('Unknown section "' . $section . '"'); + } + + $this->sections[$section] = ''; + $secfile = $section == 'FILE' || $section == 'FILEEOF' || $section == 'FILE_EXTERNAL'; + $secdone = false; + continue; + } + + // Add to the section text. + if (!$secdone) { + $this->sections[$section] .= $line; + } + + // End of actual test? + if ($secfile && preg_match('/^===DONE===\s*$/', $line)) { + $secdone = true; + } + } + + fclose($fp); + } + + /** + * @throws BorkageException + */ + private function validateAndProcess(bool $inRedirect): void + { + // the redirect section allows a set of tests to be reused outside of + // a given test dir + if ($this->hasSection('REDIRECTTEST')) { + if ($inRedirect) { + throw new BorkageException("Can't redirect a test from within a redirected test"); + } + return; + } + if (!$this->hasSection('PHPDBG') && $this->hasSection('FILE') + $this->hasSection('FILEEOF') + $this->hasSection('FILE_EXTERNAL') != 1) { + throw new BorkageException("missing section --FILE--"); + } + + if ($this->hasSection('FILEEOF')) { + $this->sections['FILE'] = preg_replace("/[\r\n]+$/", '', $this->sections['FILEEOF']); + unset($this->sections['FILEEOF']); + } + + foreach (['FILE', 'EXPECT', 'EXPECTF', 'EXPECTREGEX'] as $prefix) { + // For grepping: FILE_EXTERNAL, EXPECT_EXTERNAL, EXPECTF_EXTERNAL, EXPECTREGEX_EXTERNAL + $key = $prefix . '_EXTERNAL'; + + if ($this->hasSection($key)) { + // don't allow tests to retrieve files from anywhere but this subdirectory + $dir = dirname($this->fileName); + $fileName = $dir . '/' . trim(str_replace('..', '', $this->getSection($key))); + + if (file_exists($fileName)) { + $this->sections[$prefix] = file_get_contents($fileName); + } else { + throw new BorkageException("could not load --" . $key . "-- " . $dir . '/' . trim($fileName)); + } + } + } + + if (($this->hasSection('EXPECT') + $this->hasSection('EXPECTF') + $this->hasSection('EXPECTREGEX')) != 1) { + throw new BorkageException("missing section --EXPECT--, --EXPECTF-- or --EXPECTREGEX--"); + } + + if ($this->hasSection('PHPDBG') && !$this->hasSection('STDIN')) { + $this->sections['STDIN'] = $this->sections['PHPDBG'] . "\n"; } } } @@ -3777,4 +3894,277 @@ function check_proc_open_function_exists(): void } } -main(); \ No newline at end of file +function bless_failed_tests(array $failedTests): void +{ + if (empty($failedTests)) { + return; + } + $args = [ + PHP_BINARY, + __DIR__ . 
'/scripts/dev/bless_tests.php', + ]; + foreach ($failedTests as $test) { + $args[] = $test['name']; + } + proc_open($args, [], $pipes); +} + +/* + * BSD 3-Clause License + * + * Copyright (c) 2002-2023, Sebastian Bergmann + * All rights reserved. + * + * This file is part of sebastian/diff. + * https://github.com/sebastianbergmann/diff + */ + +final class Differ +{ + public const OLD = 0; + public const ADDED = 1; + public const REMOVED = 2; + private DiffOutputBuilder $outputBuilder; + private $isEqual; + + public function __construct(callable $isEqual) + { + $this->outputBuilder = new DiffOutputBuilder; + $this->isEqual = $isEqual; + } + + public function diff(array $from, array $to): string + { + $diff = $this->diffToArray($from, $to); + + return $this->outputBuilder->getDiff($diff); + } + + public function diffToArray(array $from, array $to): array + { + $fromLine = 1; + $toLine = 1; + + [$from, $to, $start, $end] = $this->getArrayDiffParted($from, $to); + + $common = $this->calculateCommonSubsequence(array_values($from), array_values($to)); + $diff = []; + + foreach ($start as $token) { + $diff[] = [$token, self::OLD]; + $fromLine++; + $toLine++; + } + + reset($from); + reset($to); + + foreach ($common as $token) { + while (!empty($from) && !($this->isEqual)(reset($from), $token)) { + $diff[] = [array_shift($from), self::REMOVED, $fromLine++]; + } + + while (!empty($to) && !($this->isEqual)($token, reset($to))) { + $diff[] = [array_shift($to), self::ADDED, $toLine++]; + } + + $diff[] = [$token, self::OLD]; + $fromLine++; + $toLine++; + + array_shift($from); + array_shift($to); + } + + while (($token = array_shift($from)) !== null) { + $diff[] = [$token, self::REMOVED, $fromLine++]; + } + + while (($token = array_shift($to)) !== null) { + $diff[] = [$token, self::ADDED, $toLine++]; + } + + foreach ($end as $token) { + $diff[] = [$token, self::OLD]; + } + + return $diff; + } + + private function getArrayDiffParted(array &$from, array &$to): array + { + $start = []; + $end = []; + + reset($to); + + foreach ($from as $k => $v) { + $toK = key($to); + + if (($this->isEqual)($toK, $k) && ($this->isEqual)($v, $to[$k])) { + $start[$k] = $v; + + unset($from[$k], $to[$k]); + } else { + break; + } + } + + end($from); + end($to); + + do { + $fromK = key($from); + $toK = key($to); + + if (null === $fromK || null === $toK || !($this->isEqual)(current($from), current($to))) { + break; + } + + prev($from); + prev($to); + + $end = [$fromK => $from[$fromK]] + $end; + unset($from[$fromK], $to[$toK]); + } while (true); + + return [$from, $to, $start, $end]; + } + + public function calculateCommonSubsequence(array $from, array $to): array + { + $cFrom = count($from); + $cTo = count($to); + + if ($cFrom === 0) { + return []; + } + + if ($cFrom === 1) { + foreach ($to as $toV) { + if (($this->isEqual)($from[0], $toV)) { + return [$toV]; + } + } + + return []; + } + + $i = (int) ($cFrom / 2); + $fromStart = array_slice($from, 0, $i); + $fromEnd = array_slice($from, $i); + $llB = $this->commonSubsequenceLength($fromStart, $to); + $llE = $this->commonSubsequenceLength(array_reverse($fromEnd), array_reverse($to)); + $jMax = 0; + $max = 0; + + for ($j = 0; $j <= $cTo; $j++) { + $m = $llB[$j] + $llE[$cTo - $j]; + + if ($m >= $max) { + $max = $m; + $jMax = $j; + } + } + + $toStart = array_slice($to, 0, $jMax); + $toEnd = array_slice($to, $jMax); + + return array_merge( + $this->calculateCommonSubsequence($fromStart, $toStart), + $this->calculateCommonSubsequence($fromEnd, $toEnd) + ); + } + + private function 
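// Illustrative sketch (hypothetical values, not part of the patch): Differ computes a
// longest-common-subsequence diff and DiffOutputBuilder below renders it; the equality
// callback is what lets generate_diff() treat EXPECTF patterns as line matchers. With a
// plain literal comparison:
//   $differ = new Differ(function ($expected, $actual) { return $expected === $actual; });
//   echo $differ->diff(['a', 'b', 'c'], ['a', 'x', 'c']); // reports 'b' removed, 'x' added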
commonSubsequenceLength(array $from, array $to): array + { + $current = array_fill(0, count($to) + 1, 0); + $cFrom = count($from); + $cTo = count($to); + + for ($i = 0; $i < $cFrom; $i++) { + $prev = $current; + + for ($j = 0; $j < $cTo; $j++) { + if (($this->isEqual)($from[$i], $to[$j])) { + $current[$j + 1] = $prev[$j] + 1; + } else { + $current[$j + 1] = max($current[$j], $prev[$j + 1]); + } + } + } + + return $current; + } +} + +class DiffOutputBuilder +{ + public function getDiff(array $diffs): string + { + global $context_line_count; + $i = 0; + $number_len = max(3, strlen((string)count($diffs))); + $line_number_spec = '%0' . $number_len . 'd'; + $buffer = fopen('php://memory', 'r+b'); + while ($i < count($diffs)) { + // Find next difference + $next = $i; + while ($next < count($diffs)) { + if ($diffs[$next][1] !== Differ::OLD) { + break; + } + $next++; + } + // Found no more differentiating rows, we're done + if ($next === count($diffs)) { + if (($i - 1) < count($diffs)) { + fwrite($buffer, "--\n"); + } + break; + } + // Print separator if necessary + if ($i < ($next - $context_line_count)) { + fwrite($buffer, "--\n"); + $i = $next - $context_line_count; + } + // Print leading context + while ($i < $next) { + fwrite($buffer, str_repeat(' ', $number_len + 2)); + fwrite($buffer, $diffs[$i][0]); + fwrite($buffer, "\n"); + $i++; + } + // Print differences + while ($i < count($diffs) && $diffs[$i][1] !== Differ::OLD) { + fwrite($buffer, sprintf($line_number_spec, $diffs[$i][2])); + switch ($diffs[$i][1]) { + case Differ::ADDED: + fwrite($buffer, '+ '); + break; + case Differ::REMOVED: + fwrite($buffer, '- '); + break; + } + fwrite($buffer, $diffs[$i][0]); + fwrite($buffer, "\n"); + $i++; + } + // Print trailing context + $afterContext = min($i + $context_line_count, count($diffs)); + while ($i < $afterContext && $diffs[$i][1] === Differ::OLD) { + fwrite($buffer, str_repeat(' ', $number_len + 2)); + fwrite($buffer, $diffs[$i][0]); + fwrite($buffer, "\n"); + $i++; + } + } + + $diff = stream_get_contents($buffer, -1, 0); + fclose($buffer); + + return $diff; + } +} + +main(); From dcd3a163ffc318038763ba01ad1ff27030e13c44 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 13:21:13 +0000 Subject: [PATCH 050/170] ensure compatibility with php <= php 7.4 --- lib/php-extension/run-tests.php | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/php-extension/run-tests.php b/lib/php-extension/run-tests.php index d0befa373..1ac10e617 100644 --- a/lib/php-extension/run-tests.php +++ b/lib/php-extension/run-tests.php @@ -3167,7 +3167,7 @@ function show_start(int $start_timestamp): void echo "TIME START " . date('Y-m-d H:i:s', $start_timestamp) . "\n=====================================================================\n"; } -function show_end(int $start_timestamp, int|float $start_time, int|float $end_time): void +function show_end(int $start_timestamp, int $start_time, float $end_time): void { echo "=====================================================================\nTIME END " . date('Y-m-d H:i:s', $start_timestamp + (int)(($end_time - $start_time)/1e9)) . 
"\n"; } From 6fa5dde4329687a5aed2136543650dfac684bf0d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 13:55:19 +0000 Subject: [PATCH 051/170] Add phpize step in CLI test workflows for PHP run-tests.sh --- .github/workflows/build.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6faa37370..843ccc58a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -446,6 +446,9 @@ jobs: - name: Run CLI tests run: | export TEST_PHP_EXECUTABLE=/usr/bin/php + cd lib/php-extension/ + phpize + cd ../../ php lib/php-extension/run-tests.php ./tests/cli - name: Run ${{ matrix.server }} server tests @@ -510,6 +513,10 @@ jobs: - name: Run CLI tests run: | + export TEST_PHP_EXECUTABLE=/usr/local/bin/php + cd lib/php-extension/ + phpize + cd ../../ php lib/php-extension/run-tests.php ./tests/cli - name: Run ${{ matrix.server }} server tests @@ -579,6 +586,10 @@ jobs: - name: Run CLI tests run: | + export TEST_PHP_EXECUTABLE=/usr/local/bin/php + cd lib/php-extension/ + phpize + cd ../../ php lib/php-extension/run-tests.php ./tests/cli - name: Run ${{ matrix.server }} server tests @@ -668,6 +679,9 @@ jobs: - name: Run CLI tests run: | export TEST_PHP_EXECUTABLE=/usr/local/bin/php + cd lib/php-extension/ + phpize + cd ../../ php lib/php-extension/run-tests.php ./tests/cli - name: Run ${{ matrix.server }} server tests From e877252ecacfb0f46dc9572b1ac564c4e8f7dda8 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 14:19:25 +0000 Subject: [PATCH 052/170] Update CentOS and Ubuntu Dockerfiles to build PHP from source in NTS mode, enhancing test infrastructure and dependencies. --- .../workflows/Dockerfile.centos-php-test-nts | 161 ++++++++++++-- .../workflows/Dockerfile.ubuntu-php-test-nts | 202 ++++++++++++------ 2 files changed, 277 insertions(+), 86 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index 3e2bd8084..75709fe38 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -1,36 +1,153 @@ # syntax=docker/dockerfile:1.7 -# CentOS Stream 9 test image with PHP (from Remi) preinstalled per version, -# plus httpd (mod_php), nginx + php-fpm, MySQL server and Python deps. 
+# CentOS Stream 9 test image with PHP built from source in NTS mode +# Used for testing the extension with standard PHP (non-thread-safe) ARG BASE_IMAGE=quay.io/centos/centos:stream9 ARG PHP_VERSION=8.3 +ARG PHP_SRC_REF=PHP-${PHP_VERSION} -FROM ${BASE_IMAGE} +FROM ${BASE_IMAGE} AS base SHELL ["/bin/bash", "-euo", "pipefail", "-c"] +ENV TZ=Etc/UTC \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + PHP_VERSION=${PHP_VERSION} -# Remi repo + chosen PHP stream -ARG PHP_VERSION -RUN yum install -y yum-utils -RUN dnf -y install https://rpms.remirepo.net/enterprise/remi-release-9.rpm -RUN yum install -y gcc -RUN yum install -y python3-devel -RUN dnf --assumeyes module reset php -RUN dnf --assumeyes --nogpgcheck module install php:remi-${PHP_VERSION} -RUN dnf --assumeyes install php-pdo -RUN dnf --assumeyes install php-mysqlnd -RUN if [ "$(printf '%s\n' "${PHP_VERSION}" "8.5" | sort -V | head -n1)" != "8.5" ]; then \ - dnf --assumeyes install php-opcache || true; \ - fi -RUN yum install -y mod_php nginx php-fpm procps-ng mysql-server +RUN yum install -y yum-utils && \ + dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true +# Install minimal tools needed for re2c build (replace curl-minimal with full curl) +RUN yum install -y xz tar gcc gcc-c++ make -# Python deps used by your test harness -RUN python3 -m pip install --no-cache-dir --upgrade pip \ - && python3 -m pip install --no-cache-dir flask requests psutil +ENV RE2C_VERSION=3.1 +RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ + && mkdir -p /tmp/re2c-src \ + && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ + && cd /tmp/re2c-src \ + && ./configure \ + && make -j"$(nproc)" \ + && make install \ + && rm -rf /tmp/re2c-src /tmp/re2c.tar.xz + +# Install remaining build dependencies and tools +RUN yum install -y autoconf bison pkgconfig \ + libxml2-devel sqlite-devel libcurl-devel openssl-devel \ + libzip-devel oniguruma-devel libjpeg-turbo-devel libpng-devel libwebp-devel \ + libicu-devel readline-devel libxslt-devel \ + git wget \ + python3 python3-devel python3-pip \ + nginx httpd procps-ng mysql-server \ + && yum clean all + +# Install mariadb-devel separately (may need different repo or skip if not critical) +RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" + +# Fetch and build PHP from source with NTS +FROM base AS php-build +ARG PHP_SRC_REF +WORKDIR /usr/src +RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git +WORKDIR /usr/src/php-src + +RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac +RUN ./buildconf --force + +# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) +RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ + awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ + indent = $0; \ + gsub(/[^[:space:]].*/, "", indent); \ + print indent "#ifdef RSA_SSLV23_PADDING"; \ + gsub(/^[[:space:]]*/, indent " "); \ + print; \ + print indent "#endif"; \ + next \ + } \ + { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ + mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ + fi || true + +# Build PHP with NTS (no ZTS flags) +RUN ./configure \ + --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + 
--with-config-file-scan-dir=/usr/local/etc/php/conf.d \ + --enable-fpm \ + --enable-mbstring \ + --enable-pcntl \ + --with-extra-version="" \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ +&& make -j"$(nproc)" \ +&& make install \ +&& strip /usr/local/bin/php /usr/local/sbin/php-fpm || true + +# Final image with PHP and test infrastructure +FROM base AS final +COPY --from=php-build /usr/local /usr/local + +# Verify NTS (ZTS should NOT be enabled) +RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should be NTS!" && exit 1) && \ + php -m | grep -E 'curl|mysqli' >/dev/null + +ENV PATH="/usr/local/bin:${PATH}" + +RUN ln -sf /usr/local/bin/php /usr/bin/php && \ + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + +RUN mkdir -p /etc/php-fpm.d && \ + mkdir -p /run/php-fpm && \ + mkdir -p /var/run && \ + mkdir -p /var/log/php-fpm && \ + mkdir -p /etc/httpd || true && \ + mkdir -p /usr/local/etc/php-fpm.d && \ + mkdir -p /usr/local/etc/php/conf.d && \ + + ln -sf /usr/local/etc/php/conf.d /etc/php.d || true && \ + + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm/error.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ + + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 | grep -v "Nothing matches the include pattern" || true && \ + php-fpm -t -y /usr/local/etc/php-fpm.conf >/dev/null 2>&1 || \ + (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && \ + echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && \ + echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ + exit 1) && \ + + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true + +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN mkdir -p /usr/local/etc/php/conf.d && \ + echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + +# Python deps used by test harness +RUN python3 -m pip install --no-cache-dir --upgrade pip && \ + python3 -m pip install --no-cache-dir flask requests psutil - RUN yum install -y httpd # Quality-of-life -ENV TZ=Etc/UTC WORKDIR /work CMD ["bash"] diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index c17fc8b34..9c9fc6fbc 
100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -1,49 +1,154 @@ # syntax=docker/dockerfile:1.7 -FROM ubuntu:24.04 +# Ubuntu test image with PHP built from source in NTS mode +# Used for testing the extension with standard PHP (non-thread-safe) ARG DEBIAN_FRONTEND=noninteractive -ARG PHP_VERSION=7.2 +ARG PHP_VERSION=8.3 +ARG PHP_SRC_REF=PHP-${PHP_VERSION} -ENV PHP_VERSION=${PHP_VERSION} +FROM ubuntu:24.04 AS base +SHELL ["/bin/bash", "-eo", "pipefail", "-c"] +ENV DEBIAN_FRONTEND=noninteractive \ + TZ=Etc/UTC \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + LANGUAGE=C.UTF-8 \ + PHP_VERSION=${PHP_VERSION} + +# Install base dependencies and build tools RUN apt-get update && \ apt-get install -y --no-install-recommends \ ca-certificates curl gnupg lsb-release tzdata locales \ software-properties-common apt-transport-https \ git make unzip xz-utils \ - # web servers & DB (installed later after PPA) - && rm -rf /var/lib/apt/lists/* + build-essential autoconf bison re2c pkg-config \ + libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ + libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ + libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + && rm -rf /var/lib/apt/lists/* # Timezone to UTC -RUN ln -fs /usr/share/zoneinfo/Etc/UTC /etc/localtime && \ - echo "Etc/UTC" > /etc/timezone && \ +RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ + echo "${TZ}" > /etc/timezone && \ dpkg-reconfigure -f noninteractive tzdata - -RUN add-apt-repository -y universe && \ - add-apt-repository -y ppa:ondrej/php - -RUN apt-get update - -RUN set -eux; \ - PHP_PKG="php${PHP_VERSION}"; \ +# Fetch and build PHP from source with NTS +FROM base AS php-build +ARG PHP_SRC_REF +WORKDIR /usr/src +RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git +WORKDIR /usr/src/php-src + +RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac +RUN ./buildconf --force + +# Patch openssl.c for OpenSSL compatibility +RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ + awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ + indent = $0; \ + gsub(/[^[:space:]].*/, "", indent); \ + print indent "#ifdef RSA_SSLV23_PADDING"; \ + gsub(/^[[:space:]]*/, indent " "); \ + print; \ + print indent "#endif"; \ + next \ + } \ + { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ + mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ + fi || true + +# Build PHP with NTS (no ZTS flags) +RUN ./configure \ + --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ + --enable-fpm \ + --enable-mbstring \ + --enable-pcntl \ + --with-extra-version="" \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ +&& make -j"$(nproc)" \ +&& make install \ +&& strip /usr/local/bin/php /usr/local/sbin/php-fpm || true + +# Final image with PHP and test infrastructure +FROM base AS final +COPY --from=php-build /usr/local /usr/local + +# Verify NTS (ZTS should NOT be enabled) +RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should be NTS!" 
&& exit 1) && \ + php -m | grep -E 'curl|mysqli' >/dev/null + +ENV PATH="/usr/local/bin:${PATH}" + +RUN ln -sf /usr/local/bin/php /usr/bin/php && \ + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + +RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ + mkdir -p /etc/php/${PHP_VER}/fpm && \ + mkdir -p /etc/php/${PHP_VER}/fpm/pool.d && \ + mkdir -p /run/php && \ + mkdir -p /run/php-fpm && \ + mkdir -p /var/run && \ + mkdir -p /var/log && \ + mkdir -p /usr/local/etc/php-fpm.d && \ + mkdir -p /usr/local/etc/php/conf.d && \ + + ln -sf /usr/local/etc/php/conf.d /etc/php/${PHP_VER}/fpm/conf.d || true && \ + + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php${PHP_VER}-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/etc/php/${PHP_VER}/fpm/pool.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ + + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 | grep -v "Nothing matches the include pattern" || true && \ + php-fpm -t -y /usr/local/etc/php-fpm.conf >/dev/null 2>&1 || \ + (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && \ + echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && \ + echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ + exit 1) && \ + + ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true + +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN mkdir -p /usr/local/etc/php/conf.d && \ + echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + +# Install web servers and database (without PHP packages) +RUN apt-get update && \ apt-get install -y --no-install-recommends \ nginx \ apache2 \ mariadb-server \ - ${PHP_PKG} ${PHP_PKG}-cli ${PHP_PKG}-common ${PHP_PKG}-fpm \ - ${PHP_PKG}-curl ${PHP_PKG}-sqlite3 ${PHP_PKG}-mysql \ - ${PHP_PKG}-mbstring ${PHP_PKG}-xml ${PHP_PKG}-zip \ - libapache2-mod-php${PHP_VERSION} \ - ; \ - # Apache: switch to prefork for mod_php scenario and enable rewrite - a2dismod mpm_event || true; \ - a2dismod mpm_worker || true; \ - a2enmod mpm_prefork rewrite || true - -RUN if [ "$(printf '%s\n' "${PHP_VERSION}" "8.5" | sort -V | head -n1)" != "8.5" ]; then \ - apt-get install -y --no-install-recommends php${PHP_VERSION}-opcache; \ - fi + apache2-bin \ + && rm -rf /var/lib/apt/lists/* + +# 
Apache: switch to prefork for mod_php scenario and enable rewrite +RUN a2dismod mpm_event || true && \ + a2dismod mpm_worker || true && \ + a2enmod mpm_prefork rewrite cgi cgid || true # ---- Python toolchain used by tests ---- ENV PIP_DISABLE_PIP_VERSION_CHECK=1 \ @@ -58,17 +163,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ flask pandas psutil requests \ && apt-get clean && rm -rf /var/lib/apt/lists/* -# PHP-CGI + Apache CGI modules for tests that require CGI -RUN set -eux; \ - apt-get update; \ - apt-get install -y --no-install-recommends \ - php${PHP_VERSION}-cgi \ - apache2-bin; \ - a2enmod cgi cgid || true; \ - mkdir -p /usr/lib/cgi-bin; \ - # Provide a php-cgi wrapper in the standard location - ln -sf /usr/bin/php-cgi /usr/lib/cgi-bin/php-cgi - # Helper: start MariaDB RUN mkdir -p /usr/local/bin /var/lib/mysql /run/mysqld && \ printf '%s\n' '#!/usr/bin/env bash' \ @@ -87,31 +181,11 @@ RUN mkdir -p /usr/local/bin /var/lib/mysql /run/mysqld && \ > /usr/local/bin/start-mariadb && \ chmod +x /usr/local/bin/start-mariadb -# Robust Apache PHP switcher (handles module names, MPM, restart, verification) -RUN printf '%s\n' '#!/usr/bin/env bash' \ - 'set -euo pipefail' \ - 'ver="${1:-${PHP_VERSION:-8.2}}"' \ - 'a2dismod mpm_event >/dev/null 2>&1 || true' \ - 'a2dismod mpm_worker >/dev/null 2>&1 || true' \ - 'a2enmod mpm_prefork >/dev/null 2>&1 || true' \ - 'if ! a2query -m "php${ver}" >/dev/null 2>&1; then' \ - ' apt-get update && apt-get install -y --no-install-recommends "libapache2-mod-php${ver}"' \ - 'fi' \ - 'for m in php5 php7 php7.0 php7.1 php7.2 php7.3 php7.4 php8 php8.0 php8.1 php8.2 php8.3 php8.4 php8.5; do' \ - ' a2query -m "$m" >/dev/null 2>&1 && a2dismod "$m" >/dev/null 2>&1 || true' \ - 'done' \ - 'a2enmod "php${ver}"' \ - 'apache2ctl -t' \ - 'apache2ctl -k graceful || apache2ctl -k restart' \ - 'if ! apache2ctl -M 2>/dev/null | grep -Eiq "php[0-9]*_module"; then' \ - ' echo "Apache does not have a PHP module loaded:"' \ - ' apache2ctl -M || true' \ - ' exit 1' \ - 'fi' \ - 'echo "Apache now using mod_php for PHP ${ver}"' \ - > /usr/local/bin/a2-switch-php && \ - chmod +x /usr/local/bin/a2-switch-php - +# Create PHP-CGI symlink for CGI tests (using source-built PHP) +RUN mkdir -p /usr/lib/cgi-bin && \ + ln -sf /usr/local/bin/php-cgi /usr/lib/cgi-bin/php-cgi || \ + (echo "Note: php-cgi may not be available in source build" && true) WORKDIR /work +CMD ["bash"] From c9a5995f7e788ea13ccfb9346d59e6ce808a195b Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 14:34:59 +0000 Subject: [PATCH 053/170] official run-tests.php. now we run phpize before pipeline tests too --- lib/php-extension/run-tests.php | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/php-extension/run-tests.php b/lib/php-extension/run-tests.php index 1ac10e617..d0befa373 100644 --- a/lib/php-extension/run-tests.php +++ b/lib/php-extension/run-tests.php @@ -3167,7 +3167,7 @@ function show_start(int $start_timestamp): void echo "TIME START " . date('Y-m-d H:i:s', $start_timestamp) . "\n=====================================================================\n"; } -function show_end(int $start_timestamp, int $start_time, float $end_time): void +function show_end(int $start_timestamp, int|float $start_time, int|float $end_time): void { echo "=====================================================================\nTIME END " . date('Y-m-d H:i:s', $start_timestamp + (int)(($end_time - $start_time)/1e9)) . 
"\n"; } From ff8cd9d4a5e7154af51afc1f034c61b2975bb329 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 14:54:34 +0000 Subject: [PATCH 054/170] Enhance PHP-FPM setup in build workflow to verify NTS configuration and ensure symlink functionality for nginx. Added checks for PHP-FPM version and thread safety. --- .github/workflows/build.yml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 843ccc58a..bc53e31bb 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -490,9 +490,15 @@ jobs: ${{ env.AIKIDO_DEB }} - name: Prepare php-fpm + if: matrix.server == 'nginx-php-fpm' run: | - ls -l /usr/sbin | grep php - ln -s /usr/sbin/php-fpm${{ matrix.php_version }} /usr/sbin/php-fpm + # Verify NTS-built PHP-FPM exists and is NTS (not ZTS-enabled) + /usr/local/sbin/php-fpm -v + /usr/local/sbin/php-fpm -i | grep -q "Thread Safety => disabled" || (echo "ERROR: PHP-FPM not built with NTS!" && exit 1) + # Create symlink for nginx to find php-fpm + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + # Verify symlink works + php-fpm -i | grep -q "Thread Safety => disabled" || (echo "ERROR: php-fpm symlink not working or not NTS!" && exit 1) # MariaDB startup compatible with your current approach - name: Start MariaDB (background) From b4eeff5073b85f465611e18e57964c5874746c0d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 15:44:42 +0000 Subject: [PATCH 055/170] Create extension dir folder in test images --- .github/workflows/Dockerfile.centos-php-test-nts | 8 ++++++++ .github/workflows/Dockerfile.centos-php-test-zts | 8 ++++++++ .github/workflows/Dockerfile.ubuntu-php-test-nts | 8 ++++++++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 8 ++++++++ 4 files changed, 32 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index 75709fe38..99eba5b89 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -90,6 +90,14 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ + if [ -z "$EXTENSION_DIR" ]; then \ + echo "Error: Could not determine extension_dir"; \ + exit 1; \ + fi && \ + mkdir -p "$EXTENSION_DIR" \ + echo "Created extension_dir: $EXTENSION_DIR" + # Verify NTS (ZTS should NOT be enabled) RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should be NTS!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index df4b85c8f..82f397bd9 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -92,6 +92,14 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ + if [ -z "$EXTENSION_DIR" ]; then \ + echo "Error: Could not determine extension_dir"; \ + exit 1; \ + fi && \ + mkdir -p "$EXTENSION_DIR" \ + echo "Created extension_dir: $EXTENSION_DIR" + # Verify ZTS is enabled RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 9c9fc6fbc..0ff0da8b6 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -80,6 +80,14 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ + if [ -z "$EXTENSION_DIR" ]; then \ + echo "Error: Could not determine extension_dir"; \ + exit 1; \ + fi && \ + mkdir -p "$EXTENSION_DIR" \ + echo "Created extension_dir: $EXTENSION_DIR" + # Verify NTS (ZTS should NOT be enabled) RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should be NTS!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index c143d3654..2254d26b2 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -82,6 +82,14 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ + if [ -z "$EXTENSION_DIR" ]; then \ + echo "Error: Could not determine extension_dir"; \ + exit 1; \ + fi && \ + mkdir -p "$EXTENSION_DIR" \ + echo "Created extension_dir: $EXTENSION_DIR" + # Verify ZTS is enabled RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null From cc0c7c483b58d0f352b5f45413e611db55214584 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 15:50:59 +0000 Subject: [PATCH 056/170] Enable caching in CentOS and Ubuntu PHP test image workflows --- .github/workflows/build-centos-php-test-images-nts.yml | 4 ++-- .github/workflows/build-ubuntu-php-test-images-nts.yml | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-centos-php-test-images-nts.yml b/.github/workflows/build-centos-php-test-images-nts.yml index 6546ac349..5ac637261 100644 --- a/.github/workflows/build-centos-php-test-images-nts.yml +++ b/.github/workflows/build-centos-php-test-images-nts.yml @@ -38,8 +38,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} - #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} - #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max build-arm64: runs-on: ubuntu-24.04-arm diff --git a/.github/workflows/build-ubuntu-php-test-images-nts.yml b/.github/workflows/build-ubuntu-php-test-images-nts.yml index c218942a4..0e6cd4ebe 100644 --- a/.github/workflows/build-ubuntu-php-test-images-nts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-nts.yml @@ -36,8 +36,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} - #cache-from: 
type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} - #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max build-arm64: runs-on: ubuntu-24.04-arm @@ -62,8 +62,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} - #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} - #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max publish-manifests: runs-on: ubuntu-24.04 From 72b4acea6645ae10496e5ce9c969a663bd6dbac6 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 17:14:13 +0000 Subject: [PATCH 057/170] Refactor exception handling in Action.cpp to differentiate behavior based on SAPI name, ensuring correct response code setting for frankenphp and other environments. Remove unnecessary Apache PHP module activation step from build workflow. --- .github/workflows/build.yml | 6 ------ lib/php-extension/Action.cpp | 16 +++++++++++++--- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index bc53e31bb..451d97025 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -507,12 +507,6 @@ jobs: sleep 5 mysql -u root -ppwd -e "SELECT 1" || (echo "MySQL not up" && exit 1) - # For Apache mod_php tests, ensure the right PHP module is active - - name: Ensure Apache uses PHP ${{ matrix.php_version }} - if: matrix.server == 'apache-mod-php' - run: | - a2-switch-php ${{ matrix.php_version }} - - name: Install DEB run: | dpkg -i -E ${{ env.AIKIDO_DEB }}/${{ env.AIKIDO_DEB }} diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index 3fa8b2d39..a356e3a2a 100644 --- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -3,9 +3,19 @@ ACTION_STATUS Action::executeThrow(json &event) { int _code = event["code"].get(); std::string _message = event["message"].get(); - zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); - CallPhpFunctionWithOneParam("http_response_code", _code); - zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); + + const auto& sapiName = AIKIDO_GLOBAL(sapi_name); + + // For frankenphp, throw exception first; for others (cli-server, apache2handler, etc.), set response code first + if (sapiName == "frankenphp") { + zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); + CallPhpFunctionWithOneParam("http_response_code", _code); + + } else { + CallPhpFunctionWithOneParam("http_response_code", _code); + zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); + } + return BLOCK; } From 2aed4f669b7c6e7585da099c486e4705f683d6de 
Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 19:02:59 +0000 Subject: [PATCH 058/170] build mod_php --- .github/workflows/Dockerfile.centos-php-test-nts | 3 ++- .github/workflows/Dockerfile.ubuntu-php-test-nts | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index 99eba5b89..93120d7ff 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -37,7 +37,7 @@ RUN yum install -y autoconf bison pkgconfig \ libicu-devel readline-devel libxslt-devel \ git wget \ python3 python3-devel python3-pip \ - nginx httpd procps-ng mysql-server \ + nginx httpd httpd-devel procps-ng mysql-server \ && yum clean all # Install mariadb-devel separately (may need different repo or skip if not critical) @@ -82,6 +82,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ + --with-apxs2=/usr/bin/apxs \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 0ff0da8b6..bed1f1c79 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -26,6 +26,7 @@ RUN apt-get update && \ libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + apache2-dev \ && rm -rf /var/lib/apt/lists/* # Timezone to UTC @@ -72,6 +73,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ + --with-apxs2=/usr/bin/apxs \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true From d9fb10bd7e2d906a0c3fb53ac5097a903097383c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 21:05:00 +0000 Subject: [PATCH 059/170] Fix apxs2 path in Ubuntu PHP test Dockerfile --- .github/workflows/Dockerfile.ubuntu-php-test-nts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index bed1f1c79..9ba6b7f84 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -73,7 +73,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ - --with-apxs2=/usr/bin/apxs \ + --with-apxs2=/usr/bin/apxs2 \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true From 6cb98049cf7153a898e77e4a286e9cc680e3e173 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 21:10:12 +0000 Subject: [PATCH 060/170] test dockerfile update --- .github/workflows/Dockerfile.ubuntu-php-test-nts | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 9ba6b7f84..86a1cbd62 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -26,7 +26,11 @@ RUN apt-get update && \ libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + apache2 \ + apache2-bin \ apache2-dev \ + nginx \ + mariadb-server \ && rm -rf 
/var/lib/apt/lists/* # Timezone to UTC @@ -146,15 +150,6 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Install web servers and database (without PHP packages) -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - nginx \ - apache2 \ - mariadb-server \ - apache2-bin \ - && rm -rf /var/lib/apt/lists/* - # Apache: switch to prefork for mod_php scenario and enable rewrite RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ From 068d02ba92097b730eca7b74a8c9ca6816205e22 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 21:50:27 +0000 Subject: [PATCH 061/170] . --- .github/workflows/Dockerfile.centos-php-test-nts | 2 -- .github/workflows/Dockerfile.centos-php-test-zts | 2 -- .github/workflows/Dockerfile.ubuntu-php-test-nts | 2 -- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 -- 4 files changed, 8 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index 93120d7ff..95b4305da 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -71,8 +71,6 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ - --with-config-file-path=/usr/local/lib \ - --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 82f397bd9..e5026cc1a 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -71,8 +71,6 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ - --with-config-file-path=/usr/local/lib \ - --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-zts \ --enable-maintainer-zts \ --enable-fpm \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 86a1cbd62..c896e8e48 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -66,8 +66,6 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ - --with-config-file-path=/usr/local/lib \ - --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 2254d26b2..089b9ea31 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -61,8 +61,6 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ - --with-config-file-path=/usr/local/lib \ - --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-zts \ --enable-maintainer-zts \ --enable-fpm \ From a6bc0164ac0bbeceb0c9fa2b77761587416223b0 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 22:16:04 +0000 Subject: [PATCH 062/170] test --- 
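Note: the hunks below restore --with-config-file-path and --with-config-file-scan-dir, so the source-built PHP reads php.ini from /usr/local/lib and additional .ini files from /usr/local/etc/php/conf.d, and they pre-create the Apache mod_php conf.d directory. Assuming an interactive shell inside a built image, the effective paths can be double-checked with:

    php --ini
    php -i | grep -E 'Loaded Configuration File|Scan this dir'
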
.github/workflows/Dockerfile.centos-php-test-nts | 5 +++++ .github/workflows/Dockerfile.centos-php-test-zts | 5 +++++ .github/workflows/Dockerfile.ubuntu-php-test-nts | 5 +++++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 +++++ 4 files changed, 20 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index 95b4305da..ddfac18ca 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -71,6 +71,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ @@ -151,6 +153,9 @@ RUN mkdir -p /etc/php-fpm.d && \ RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ + echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" + # Python deps used by test harness RUN python3 -m pip install --no-cache-dir --upgrade pip && \ python3 -m pip install --no-cache-dir flask requests psutil diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index e5026cc1a..93a4ac11c 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -71,6 +71,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-zts \ --enable-maintainer-zts \ --enable-fpm \ @@ -152,6 +154,9 @@ RUN mkdir -p /etc/php-fpm.d && \ RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ + echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" + # Install FrankenPHP binary based on PHP version # PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.0.0-rc.3 RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index c896e8e48..19ff0d7eb 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -66,6 +66,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ @@ -148,6 +150,9 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ + echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" + # Apache: switch to prefork for mod_php scenario and enable rewrite RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ 
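Note: non-ZTS mod_php only runs under the prefork MPM, hence the mpm_event/mpm_worker removal in the Apache setup above. Assuming apache2ctl is available in the final Ubuntu image, the active MPM and the loaded PHP module can be checked with:

    apache2ctl -V | grep -i 'Server MPM'
    apache2ctl -M | grep -Ei 'prefork|php'
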
diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 089b9ea31..0e6946ebd 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -61,6 +61,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ --enable-zts \ --enable-maintainer-zts \ --enable-fpm \ @@ -144,6 +146,9 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ RUN mkdir -p /usr/local/etc/php/conf.d && \ echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ + echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" + # Install FrankenPHP binary based on PHP version # PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.0.0-rc.3 RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ From 4d8dae5066057a117a8a276f6ac3dee4c328e267 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 23:54:48 +0000 Subject: [PATCH 063/170] Add Apache module directories to CentOS and Ubuntu PHP test Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-nts | 3 +++ .github/workflows/Dockerfile.centos-php-test-zts | 4 ++++ .github/workflows/Dockerfile.ubuntu-php-test-nts | 3 +++ .github/workflows/Dockerfile.ubuntu-php-test-zts | 4 ++++ 4 files changed, 14 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index ddfac18ca..ef6830ec0 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -91,6 +91,9 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN mkdir -p /usr/lib64/httpd/modules +COPY --from=php-build /usr/lib64/httpd/modules/ /usr/lib64/httpd/modules/ + RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ echo "Error: Could not determine extension_dir"; \ diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 93a4ac11c..10298784a 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -84,6 +84,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ + --with-apxs2=/usr/bin/apxs \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true @@ -92,6 +93,9 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN mkdir -p /usr/lib64/httpd/modules +COPY --from=php-build /usr/lib64/httpd/modules/ /usr/lib64/httpd/modules/ + RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ echo "Error: Could not determine extension_dir"; \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 19ff0d7eb..7e65d2a54 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -86,6 +86,9 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN mkdir -p /usr/lib/apache2/modules +COPY --from=php-build /usr/lib/apache2/modules/ 
/usr/lib/apache2/modules/ + RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ echo "Error: Could not determine extension_dir"; \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 0e6946ebd..75d5ff535 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -74,6 +74,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ + --with-apxs2=/usr/bin/apxs2 \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true @@ -82,6 +83,9 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN mkdir -p /usr/lib/apache2/modules +COPY --from=php-build /usr/lib/apache2/modules/ /usr/lib/apache2/modules/ + RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ echo "Error: Could not determine extension_dir"; \ From ec5dc386a0cf3b72c2d960123f2a36c5041fac21 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 10 Dec 2025 23:57:45 +0000 Subject: [PATCH 064/170] Fix apxs2 path in Ubuntu PHP test Dockerfiles to use correct binary --- .github/workflows/Dockerfile.ubuntu-php-test-nts | 2 +- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 7e65d2a54..74e29f759 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -77,7 +77,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ - --with-apxs2=/usr/bin/apxs2 \ + --with-apxs2=/usr/bin/apxs \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 75d5ff535..08c28e9b1 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -74,7 +74,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ - --with-apxs2=/usr/bin/apxs2 \ + --with-apxs2=/usr/bin/apxs \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true From 136bb3e2f0f9b33ca8b1c0423b0de4fe40006b55 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 00:07:05 +0000 Subject: [PATCH 065/170] test. 
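Adds a CentOS Stream 9 devcontainer that mirrors the CI NTS test image (PHP built from source with FPM and mod_php, plus the Go and protoc toolchain), and pre-creates /usr/lib/apache2/modules in the Ubuntu NTS build, presumably so the apxs install step has an existing target directory. A local build of the devcontainer image would look roughly like the following (the image tag is illustrative):

    # tag name below is illustrative, not used by CI
    docker build -f .devcontainer/centos_php_test_nts/Dockerfile \
        --build-arg PHP_VERSION=8.3 \
        -t aikido-centos-php-test-nts-dev .
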
--- .devcontainer/centos_php_test_nts/Dockerfile | 230 ++++++++++++++++++ .../workflows/Dockerfile.ubuntu-php-test-nts | 1 + 2 files changed, 231 insertions(+) create mode 100644 .devcontainer/centos_php_test_nts/Dockerfile diff --git a/.devcontainer/centos_php_test_nts/Dockerfile b/.devcontainer/centos_php_test_nts/Dockerfile new file mode 100644 index 000000000..ae217015c --- /dev/null +++ b/.devcontainer/centos_php_test_nts/Dockerfile @@ -0,0 +1,230 @@ +# syntax=docker/dockerfile:1.7 +# CentOS Stream 9 test image with PHP built from source in NTS mode +# Used for testing the extension with standard PHP (non-thread-safe) + +ARG BASE_IMAGE=quay.io/centos/centos:stream9 +ARG PHP_VERSION=8.3 +ARG PHP_SRC_REF=PHP-${PHP_VERSION} + +FROM ${BASE_IMAGE} AS base +SHELL ["/bin/bash", "-euo", "pipefail", "-c"] + +ENV TZ=Etc/UTC \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + PHP_VERSION=${PHP_VERSION} + +RUN yum install -y yum-utils && \ + dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true + +# Install minimal tools needed for re2c build (replace curl-minimal with full curl) +RUN yum install -y xz tar gcc gcc-c++ make + +ENV RE2C_VERSION=3.1 +RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ + && mkdir -p /tmp/re2c-src \ + && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ + && cd /tmp/re2c-src \ + && ./configure \ + && make -j"$(nproc)" \ + && make install \ + && rm -rf /tmp/re2c-src /tmp/re2c.tar.xz + +# Install remaining build dependencies and tools +RUN yum install -y autoconf bison pkgconfig \ + libxml2-devel sqlite-devel libcurl-devel openssl-devel \ + libzip-devel oniguruma-devel libjpeg-turbo-devel libpng-devel libwebp-devel \ + libicu-devel readline-devel libxslt-devel \ + git wget \ + python3 python3-devel python3-pip \ + nginx httpd httpd-devel procps-ng mysql-server \ + cpio unzip nano lsof jq rpmdevtools sudo \ + && yum clean all + +# Install mariadb-devel separately (may need different repo or skip if not critical) +RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" + +# Install Go toolchain (architecture-aware) +RUN ARCH=$(uname -m) && \ + if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ + curl -O https://dl.google.com/go/go1.23.3.linux-amd64.tar.gz && \ + tar -C /usr/local -xzf go1.23.3.linux-amd64.tar.gz && \ + rm -f go1.23.3.linux-amd64.tar.gz; \ + elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ + curl -O https://dl.google.com/go/go1.23.3.linux-arm64.tar.gz && \ + tar -C /usr/local -xzf go1.23.3.linux-arm64.tar.gz && \ + rm -f go1.23.3.linux-arm64.tar.gz; \ + else \ + echo "Unsupported architecture: $ARCH" && exit 1; \ + fi +ENV PATH="/usr/local/go/bin:${PATH}" + +# Install protoc and Go protobuf plugins +RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@latest \ + && go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest +RUN ARCH=$(uname -m) && \ + if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ + PROTOC_ZIP=protoc-28.3-linux-x86_64.zip && \ + curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v28.3/$PROTOC_ZIP && \ + unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ + unzip -o $PROTOC_ZIP -d /usr/local include/* && \ + rm -f $PROTOC_ZIP; \ + elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ + PROTOC_ZIP=protoc-28.3-linux-aarch_64.zip && \ + curl -OL 
https://github.com/protocolbuffers/protobuf/releases/download/v28.3/$PROTOC_ZIP && \ + unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ + unzip -o $PROTOC_ZIP -d /usr/local include/* && \ + rm -f $PROTOC_ZIP; \ + else \ + echo "Unsupported architecture: $ARCH" && exit 1; \ + fi +ENV PATH="$HOME/go/bin:${PATH}" + +# Fetch and build PHP from source with NTS +FROM base AS php-build +ARG PHP_SRC_REF +WORKDIR /usr/src +RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git +WORKDIR /usr/src/php-src + +RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac +RUN ./buildconf --force + +# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) +RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ + awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ + indent = $0; \ + gsub(/[^[:space:]].*/, "", indent); \ + print indent "#ifdef RSA_SSLV23_PADDING"; \ + gsub(/^[[:space:]]*/, indent " "); \ + print; \ + print indent "#endif"; \ + next \ + } \ + { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ + mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ + fi || true + +# Build PHP with NTS (no ZTS flags) +RUN ./configure \ + --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ + --enable-fpm \ + --enable-mbstring \ + --enable-pcntl \ + --with-extra-version="" \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ + --with-apxs2=/usr/bin/apxs \ +&& make -j"$(nproc)" \ +&& make install \ +&& strip /usr/local/bin/php /usr/local/sbin/php-fpm || true + +# Final image with PHP and test infrastructure +FROM base AS final +COPY --from=php-build /usr/local /usr/local +# Copy libphp.so module (installed by apxs to Apache modules directory, not /usr/local) +# Note: /usr/lib64/ is the standard path for 64-bit libraries on both x86_64 and aarch64 +# The architecture is determined by the binary itself (ELF header), not the directory path +# apxs installs to /usr/lib64/httpd/modules/ on CentOS/RHEL, or /usr/lib/httpd/modules/ on some distros +# We need to copy it from the build stage since COPY --from=php-build /usr/local only copies /usr/local +RUN mkdir -p /usr/lib64/httpd/modules /usr/lib/httpd/modules +# Copy from lib64 (standard on CentOS/RHEL for both x86_64 and aarch64) +# If libphp.so exists in the build stage, it will be copied; if not, COPY will fail gracefully +COPY --from=php-build /usr/lib64/httpd/modules/ /usr/lib64/httpd/modules/ + +RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ + if [ -z "$EXTENSION_DIR" ]; then \ + echo "Error: Could not determine extension_dir"; \ + exit 1; \ + fi && \ + mkdir -p "$EXTENSION_DIR" \ + echo "Created extension_dir: $EXTENSION_DIR" + +# Verify NTS (ZTS should NOT be enabled) +RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should be NTS!" 
&& exit 1) && \ + php -m | grep -E 'curl|mysqli' >/dev/null + +ENV PATH="/usr/local/bin:${PATH}" + +RUN ln -sf /usr/local/bin/php /usr/bin/php && \ + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + +RUN mkdir -p /etc/php-fpm.d && \ + mkdir -p /run/php-fpm && \ + mkdir -p /var/run && \ + mkdir -p /var/log/php-fpm && \ + mkdir -p /etc/httpd || true && \ + mkdir -p /usr/local/etc/php-fpm.d && \ + mkdir -p /usr/local/etc/php/conf.d && \ + + ln -sf /usr/local/etc/php/conf.d /etc/php.d || true && \ + + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm/error.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ + + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 | grep -v "Nothing matches the include pattern" || true && \ + php-fpm -t -y /usr/local/etc/php-fpm.conf >/dev/null 2>&1 || \ + (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && \ + echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && \ + echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ + exit 1) && \ + + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true + +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN mkdir -p /usr/local/etc/php/conf.d && \ + echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + +RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ + echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" + +# Configure Apache to load libphp.so module (if it exists) +RUN if [ -f /usr/lib64/httpd/modules/libphp.so ]; then \ + echo "# Load PHP module for Apache" > /etc/httpd/conf.modules.d/10-php.conf && \ + echo "LoadModule php_module modules/libphp.so" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Configure PHP file handling" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo " SetHandler application/x-httpd-php" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Directory index" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "DirectoryIndex index.php index.html" >> 
/etc/httpd/conf.modules.d/10-php.conf && \ + echo "Created Apache PHP module configuration at /etc/httpd/conf.modules.d/10-php.conf"; \ + else \ + echo "Warning: libphp.so not found, skipping Apache PHP module configuration"; \ + fi + +# Python deps used by test harness +RUN python3 -m pip install --no-cache-dir --upgrade pip && \ + python3 -m pip install --no-cache-dir flask requests psutil + +# Quality-of-life +WORKDIR /work +CMD ["bash"] + + diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 74e29f759..548c009e3 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -78,6 +78,7 @@ RUN ./configure \ --with-zlib \ --with-zip \ --with-apxs2=/usr/bin/apxs \ +&& mkdir -p /usr/lib/apache2/modules \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true From 685ae8aba7dc522d5c7e74bfef74c9866809f098 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 03:29:50 +0200 Subject: [PATCH 066/170] . --- .github/workflows/Dockerfile.centos-php-test-nts | 8 +++----- .github/workflows/Dockerfile.centos-php-test-zts | 8 +++----- .github/workflows/Dockerfile.ubuntu-php-test-nts | 8 +++----- .github/workflows/Dockerfile.ubuntu-php-test-zts | 10 ++++------ 4 files changed, 13 insertions(+), 21 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index ef6830ec0..d1acb2090 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -152,12 +152,10 @@ RUN mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true -# Configure MySQL socket path for mysqli (so "localhost" connections work) -RUN mkdir -p /usr/local/etc/php/conf.d && \ - echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /usr/local/etc/php/conf.d -RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ - echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Python deps used by test harness RUN python3 -m pip install --no-cache-dir --upgrade pip && \ diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 10298784a..7ed208347 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -154,12 +154,10 @@ RUN mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true -# Configure MySQL socket path for mysqli (so "localhost" connections work) -RUN mkdir -p /usr/local/etc/php/conf.d && \ - echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /usr/local/etc/php/conf.d -RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ - echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > 
/usr/local/etc/php/conf.d/mysql-socket.ini # Install FrankenPHP binary based on PHP version # PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.0.0-rc.3 diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 548c009e3..c2124d1a0 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -150,12 +150,10 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true -# Configure MySQL socket path for mysqli (so "localhost" connections work) -RUN mkdir -p /usr/local/etc/php/conf.d && \ - echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /usr/local/etc/php/conf.d -RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ - echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Apache: switch to prefork for mod_php scenario and enable rewrite RUN a2dismod mpm_event || true && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 08c28e9b1..88b7b2ed9 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -74,7 +74,7 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ - --with-apxs2=/usr/bin/apxs \ + --with-apxs2=/usr/bin/apxs2 \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true @@ -146,12 +146,10 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true -# Configure MySQL socket path for mysqli (so "localhost" connections work) -RUN mkdir -p /usr/local/etc/php/conf.d && \ - echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +RUN mkdir -p /usr/local/etc/php/conf.d -RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ - echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" +# Configure MySQL socket path for mysqli (so "localhost" connections work) +RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Install FrankenPHP binary based on PHP version # PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.0.0-rc.3 From 128004817d2fc43d903c8511168c93dcd2a6f1f4 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 03:34:12 +0200 Subject: [PATCH 067/170] . 
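Moves the mkdir for /usr/local/etc/php/conf.d ahead of the PHP build, so the directory handed to --with-config-file-scan-dir already exists while configure and make install run; the mysqli socket override below keeps using it afterwards. Any *.ini dropped into that scan dir is picked up by the built PHP, e.g. (paths as in the Dockerfiles):

    echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" \
        > /usr/local/etc/php/conf.d/mysql-socket.ini
    php --ini    # should list mysql-socket.ini under "Additional .ini files parsed"
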
--- .github/workflows/Dockerfile.centos-php-test-nts | 5 +++-- .github/workflows/Dockerfile.centos-php-test-zts | 5 +++-- .github/workflows/Dockerfile.ubuntu-php-test-nts | 5 +++-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 +++-- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index d1acb2090..c7d0b28b4 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -68,6 +68,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ fi || true +RUN mkdir -p /usr/local/etc/php/conf.d + # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ @@ -152,9 +154,8 @@ RUN mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true -RUN mkdir -p /usr/local/etc/php/conf.d - # Configure MySQL socket path for mysqli (so "localhost" connections work) +# Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Python deps used by test harness diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 7ed208347..c7ad1af2d 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -68,6 +68,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ fi || true +RUN mkdir -p /usr/local/etc/php/conf.d + # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ @@ -154,9 +156,8 @@ RUN mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true -RUN mkdir -p /usr/local/etc/php/conf.d - # Configure MySQL socket path for mysqli (so "localhost" connections work) +# Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Install FrankenPHP binary based on PHP version diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index c2124d1a0..7ad5cb214 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -63,6 +63,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ fi || true +RUN mkdir -p /usr/local/etc/php/conf.d + # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ @@ -150,9 +152,8 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true -RUN mkdir -p /usr/local/etc/php/conf.d - # Configure MySQL socket path for mysqli (so "localhost" connections work) +# Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Apache: switch to prefork for mod_php scenario and enable rewrite diff --git 
a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 88b7b2ed9..aeef5d03b 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -58,6 +58,8 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ fi || true +RUN mkdir -p /usr/local/etc/php/conf.d + # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ @@ -146,9 +148,8 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true -RUN mkdir -p /usr/local/etc/php/conf.d - # Configure MySQL socket path for mysqli (so "localhost" connections work) +# Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Install FrankenPHP binary based on PHP version From 5e1564bfeb0be4c2373153fefc3125d5178515f1 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 03:40:33 +0200 Subject: [PATCH 068/170] ++ --- .github/workflows/Dockerfile.centos-php-test-zts | 3 +-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index c7ad1af2d..7110533a1 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -37,7 +37,7 @@ RUN yum install -y autoconf bison pkgconfig \ libicu-devel readline-devel libxslt-devel \ git wget \ python3 python3-devel python3-pip \ - nginx httpd procps-ng mysql-server \ + nginx httpd httpd-devel procps-ng mysql-server \ && yum clean all # Install mariadb-devel separately (may need different repo or skip if not critical) @@ -86,7 +86,6 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ - --with-apxs2=/usr/bin/apxs \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index aeef5d03b..4eb40bd49 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -76,7 +76,6 @@ RUN ./configure \ --with-openssl \ --with-zlib \ --with-zip \ - --with-apxs2=/usr/bin/apxs2 \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true From def7e1f3adf4be1200e13166fbb19a3bd3cfc4d7 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 03:48:26 +0200 Subject: [PATCH 069/170] .... 
--- .github/workflows/Dockerfile.centos-php-test-zts | 2 -- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 -- 2 files changed, 4 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 7110533a1..b34574af4 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -94,8 +94,6 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local -RUN mkdir -p /usr/lib64/httpd/modules -COPY --from=php-build /usr/lib64/httpd/modules/ /usr/lib64/httpd/modules/ RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 4eb40bd49..2cbd3f4d7 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -84,8 +84,6 @@ RUN ./configure \ FROM base AS final COPY --from=php-build /usr/local /usr/local -RUN mkdir -p /usr/lib/apache2/modules -COPY --from=php-build /usr/lib/apache2/modules/ /usr/lib/apache2/modules/ RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ From 203a372aab38ed9e4bd22abad0cbfff8030f3b0d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 13:31:49 +0200 Subject: [PATCH 070/170] Add Apache PHP module configuration for CentOS and Ubuntu Dockerfiles --- .../workflows/Dockerfile.centos-php-test-nts | 17 +++++++++++++++++ .../workflows/Dockerfile.ubuntu-php-test-nts | 19 +++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index c7d0b28b4..68b82d155 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -158,6 +158,23 @@ RUN mkdir -p /etc/php-fpm.d && \ # Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +# Configure Apache to load libphp.so module (if it exists) +RUN if [ -f /usr/lib64/httpd/modules/libphp.so ]; then \ + echo "# Load PHP module for Apache" > /etc/httpd/conf.modules.d/10-php.conf && \ + echo "LoadModule php_module modules/libphp.so" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Configure PHP file handling" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo " SetHandler application/x-httpd-php" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Directory index" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "DirectoryIndex index.php index.html" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "Created Apache PHP module configuration at /etc/httpd/conf.modules.d/10-php.conf"; \ + else \ + echo "Warning: libphp.so not found, skipping Apache PHP module configuration"; \ + fi + # Python deps used by test harness RUN python3 -m pip install --no-cache-dir --upgrade pip && \ python3 -m pip install --no-cache-dir flask requests psutil diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 7ad5cb214..3d5cf376e 100644 --- 
a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -156,6 +156,25 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ # Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini +# Configure Apache to load libphp.so module (if it exists) +RUN if [ -f /usr/lib/apache2/modules/libphp.so ]; then \ + mkdir -p /etc/apache2/conf-enabled && \ + echo "# Load PHP module for Apache" > /etc/apache2/conf-enabled/php.conf && \ + echo "LoadModule php_module /usr/lib/apache2/modules/libphp.so" >> /etc/apache2/conf-enabled/php.conf && \ + echo "" >> /etc/apache2/conf-enabled/php.conf && \ + echo "# Configure PHP file handling" >> /etc/apache2/conf-enabled/php.conf && \ + echo "" >> /etc/apache2/conf-enabled/php.conf && \ + echo " PHPIniDir /usr/local/etc/php" >> /etc/apache2/conf-enabled/php.conf && \ + echo " " >> /etc/apache2/conf-enabled/php.conf && \ + echo " SetHandler application/x-httpd-php" >> /etc/apache2/conf-enabled/php.conf && \ + echo " " >> /etc/apache2/conf-enabled/php.conf && \ + echo " DirectoryIndex index.php index.html" >> /etc/apache2/conf-enabled/php.conf && \ + echo "" >> /etc/apache2/conf-enabled/php.conf && \ + echo "Created Apache PHP module configuration at /etc/apache2/conf-enabled/php.conf"; \ + else \ + echo "Warning: libphp.so not found, skipping Apache PHP module configuration"; \ + fi + # Apache: switch to prefork for mod_php scenario and enable rewrite RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ From e474b7edf2a32440ee95eedb4e1d23831ab82a6c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 15:52:36 +0200 Subject: [PATCH 071/170] Update Apache configuration in Dockerfiles to use prefork MPM and handle PHP module loading for different PHP versions --- .../workflows/Dockerfile.centos-php-test-nts | 51 ++++++++++++----- .../workflows/Dockerfile.centos-php-test-zts | 1 + .../workflows/Dockerfile.ubuntu-php-test-nts | 56 ++++++++++++------- .../workflows/Dockerfile.ubuntu-php-test-zts | 1 + 4 files changed, 76 insertions(+), 33 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index 68b82d155..44b219ba3 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -70,6 +70,14 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ RUN mkdir -p /usr/local/etc/php/conf.d +# Configure Apache to use prefork MPM (non-threaded) before building PHP +# This prevents PHP's configure script from auto-enabling ZTS when --with-apxs2 is used +RUN mkdir -p /etc/httpd/conf.modules.d && \ + sed -i 's/^LoadModule mpm_event_module/#LoadModule mpm_event_module/g' /etc/httpd/conf.modules.d/*.conf 2>/dev/null || true && \ + sed -i 's/^LoadModule mpm_worker_module/#LoadModule mpm_worker_module/g' /etc/httpd/conf.modules.d/*.conf 2>/dev/null || true && \ + sed -i 's/^LoadModule mpm_prefork_module/#LoadModule mpm_prefork_module/g' /etc/httpd/conf.modules.d/*.conf 2>/dev/null || true && \ + echo "LoadModule mpm_prefork_module modules/mod_mpm_prefork.so" > /etc/httpd/conf.modules.d/00-mpm-prefork.conf + # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ @@ -78,6 +86,7 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ + --enable-cgi \ --with-extra-version="" \ 
--with-curl \ --with-mysqli \ @@ -85,6 +94,7 @@ RUN ./configure \ --with-zlib \ --with-zip \ --with-apxs2=/usr/bin/apxs \ + --disable-zts \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true @@ -159,20 +169,35 @@ RUN mkdir -p /etc/php-fpm.d && \ RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Configure Apache to load libphp.so module (if it exists) -RUN if [ -f /usr/lib64/httpd/modules/libphp.so ]; then \ - echo "# Load PHP module for Apache" > /etc/httpd/conf.modules.d/10-php.conf && \ - echo "LoadModule php_module modules/libphp.so" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "# Configure PHP file handling" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo " SetHandler application/x-httpd-php" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "# Directory index" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "DirectoryIndex index.php index.html" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "Created Apache PHP module configuration at /etc/httpd/conf.modules.d/10-php.conf"; \ +# PHP 7 uses libphp7.so, other versions use libphp.so +RUN PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + if [ "$PHP_MAJOR" = "7" ]; then \ + LIBPHP_NAME="libphp7.so"; \ + else \ + LIBPHP_NAME="libphp.so"; \ + fi && \ + if [ -f "/usr/lib64/httpd/modules/${LIBPHP_NAME}" ]; then \ + echo "# Load PHP module for Apache" > /etc/httpd/conf.modules.d/10-php.conf && \ + if [ "$PHP_MAJOR" = "7" ]; then \ + echo "LoadModule php7_module /usr/lib64/httpd/modules/${LIBPHP_NAME}" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Configure PHP file handling" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf; \ + else \ + echo "LoadModule php_module /usr/lib64/httpd/modules/${LIBPHP_NAME}" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Configure PHP file handling" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf; \ + fi && \ + echo " PHPIniDir /usr/local/etc/php" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo " " >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo " SetHandler application/x-httpd-php" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo " " >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo " DirectoryIndex index.php index.html" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "Created Apache PHP module configuration at /etc/httpd/conf.modules.d/10-php.conf with ${LIBPHP_NAME}"; \ else \ - echo "Warning: libphp.so not found, skipping Apache PHP module configuration"; \ + echo "Warning: ${LIBPHP_NAME} not found, skipping Apache PHP module configuration"; \ fi # Python deps used by test harness diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index b34574af4..d24ccc51f 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -80,6 +80,7 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ + --enable-cgi \ 
--with-extra-version="" \ --with-curl \ --with-mysqli \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 3d5cf376e..967b34e2f 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -65,6 +65,11 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ RUN mkdir -p /usr/local/etc/php/conf.d +# Apache: switch to prefork for mod_php scenario and enable rewrite +RUN a2dismod mpm_event || true && \ + a2dismod mpm_worker || true && \ + a2enmod mpm_prefork rewrite cgi cgid || true + # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ @@ -73,6 +78,7 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ + --enable-cgi \ --with-extra-version="" \ --with-curl \ --with-mysqli \ @@ -80,6 +86,7 @@ RUN ./configure \ --with-zlib \ --with-zip \ --with-apxs2=/usr/bin/apxs \ + --disable-zts \ && mkdir -p /usr/lib/apache2/modules \ && make -j"$(nproc)" \ && make install \ @@ -157,29 +164,38 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini # Configure Apache to load libphp.so module (if it exists) -RUN if [ -f /usr/lib/apache2/modules/libphp.so ]; then \ - mkdir -p /etc/apache2/conf-enabled && \ - echo "# Load PHP module for Apache" > /etc/apache2/conf-enabled/php.conf && \ - echo "LoadModule php_module /usr/lib/apache2/modules/libphp.so" >> /etc/apache2/conf-enabled/php.conf && \ - echo "" >> /etc/apache2/conf-enabled/php.conf && \ - echo "# Configure PHP file handling" >> /etc/apache2/conf-enabled/php.conf && \ - echo "" >> /etc/apache2/conf-enabled/php.conf && \ - echo " PHPIniDir /usr/local/etc/php" >> /etc/apache2/conf-enabled/php.conf && \ - echo " " >> /etc/apache2/conf-enabled/php.conf && \ - echo " SetHandler application/x-httpd-php" >> /etc/apache2/conf-enabled/php.conf && \ - echo " " >> /etc/apache2/conf-enabled/php.conf && \ - echo " DirectoryIndex index.php index.html" >> /etc/apache2/conf-enabled/php.conf && \ - echo "" >> /etc/apache2/conf-enabled/php.conf && \ - echo "Created Apache PHP module configuration at /etc/apache2/conf-enabled/php.conf"; \ +# PHP 7 uses libphp7.so, other versions use libphp.so +RUN PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + if [ "$PHP_MAJOR" = "7" ]; then \ + LIBPHP_NAME="libphp7.so"; \ + else \ + LIBPHP_NAME="libphp.so"; \ + fi && \ + if [ -f "/usr/lib/apache2/modules/${LIBPHP_NAME}" ]; then \ + echo "# Load PHP module for Apache" > /etc/apache2/conf-available/php.conf && \ + if [ "$PHP_MAJOR" = "7" ]; then \ + echo "LoadModule php7_module /usr/lib/apache2/modules/${LIBPHP_NAME}" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ + echo "# Configure PHP file handling" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf; \ + else \ + echo "LoadModule php_module /usr/lib/apache2/modules/${LIBPHP_NAME}" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ + echo "# Configure PHP file handling" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf; \ + fi && \ + echo " PHPIniDir /usr/local/etc/php" >> /etc/apache2/conf-available/php.conf && \ + echo " " >> /etc/apache2/conf-available/php.conf && \ + echo " SetHandler application/x-httpd-php" >> 
/etc/apache2/conf-available/php.conf && \ + echo " " >> /etc/apache2/conf-available/php.conf && \ + echo " DirectoryIndex index.php index.html" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ + a2enconf php >/dev/null 2>&1 || true && \ + echo "Created Apache PHP module configuration with ${LIBPHP_NAME}"; \ else \ - echo "Warning: libphp.so not found, skipping Apache PHP module configuration"; \ + echo "Warning: ${LIBPHP_NAME} not found, skipping Apache PHP module configuration"; \ fi -# Apache: switch to prefork for mod_php scenario and enable rewrite -RUN a2dismod mpm_event || true && \ - a2dismod mpm_worker || true && \ - a2enmod mpm_prefork rewrite cgi cgid || true - # ---- Python toolchain used by tests ---- ENV PIP_DISABLE_PIP_VERSION_CHECK=1 \ PYTHONDONTWRITEBYTECODE=1 \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 2cbd3f4d7..bee963375 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -70,6 +70,7 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ + --enable-cgi \ --with-extra-version="" \ --with-curl \ --with-mysqli \ From 1326cceb9d335bcfb4f2d7e0e8f7940c45818240 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 17:32:00 +0200 Subject: [PATCH 072/170] Add FrankenPHP testing workflows for PHP versions 8.2 to 8.5. --- .../workflows/Dockerfile.centos-php-test-nts | 3 +- .../workflows/Dockerfile.centos-php-test-zts | 35 +++-- .../workflows/Dockerfile.ubuntu-php-test-nts | 3 +- .../workflows/Dockerfile.ubuntu-php-test-zts | 29 ++-- .github/workflows/build.yml | 133 ++++++++++++++++++ tools/server_tests/apache/main.py | 9 +- 6 files changed, 188 insertions(+), 24 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-nts b/.github/workflows/Dockerfile.centos-php-test-nts index 44b219ba3..854530aa1 100644 --- a/.github/workflows/Dockerfile.centos-php-test-nts +++ b/.github/workflows/Dockerfile.centos-php-test-nts @@ -121,7 +121,8 @@ RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ - ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ + ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /run/php-fpm && \ diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index d24ccc51f..eb5892330 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -111,7 +111,8 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ - ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ + ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /run/php-fpm && \ @@ -158,31 +159,41 @@ RUN mkdir -p /etc/php-fpm.d && \ # Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Install FrankenPHP binary based on PHP version -# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.0.0-rc.3 -RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ +# Install FrankenPHP binary based on PHP version and architecture +# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.1.0 +RUN ARCH=$(uname -m) && \ + if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ + ARCH_SUFFIX="x86_64"; \ + elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ + ARCH_SUFFIX="aarch64"; \ + else \ + echo "Unsupported architecture: $ARCH" && exit 1; \ + fi && \ + if [ "${PHP_VERSION}" = "8.5" ]; then \ FRANKENPHP_VERSION="1.10.1" \ - && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-0.0.0-1.x86_64.rpm" \ - && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ - && yum install -y /tmp/frankenphp.rpm \ - && rm -f /tmp/frankenphp.rpm; \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ + && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ + && chmod +x /usr/local/bin/frankenphp \ + && mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ elif [ "${PHP_VERSION}" = "8.4" ]; then \ FRANKENPHP_VERSION="1.9.1" \ - && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-0.0.0-1.x86_64.rpm" \ + && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-0.0.0-1.${ARCH_SUFFIX}.rpm" \ && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ && yum install -y /tmp/frankenphp.rpm \ && rm -f /tmp/frankenphp.rpm; \ elif [ "${PHP_VERSION}" = "8.3" ]; then \ FRANKENPHP_VERSION="1.3.2" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ elif [ "${PHP_VERSION}" = "8.2" ]; then \ - FRANKENPHP_VERSION="1.0.0-rc.3" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + FRANKENPHP_VERSION="1.1.0" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts 
b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 967b34e2f..6606ad4ad 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -114,7 +114,8 @@ RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ - ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ + ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ mkdir -p /etc/php/${PHP_VER}/fpm && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index bee963375..43a3873ae 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -101,7 +101,8 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ - ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ + ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ mkdir -p /etc/php/${PHP_VER}/fpm && \ @@ -150,11 +151,21 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ # Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Install FrankenPHP binary based on PHP version -# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.0.0-rc.3 -RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ +# Install FrankenPHP binary based on PHP version and architecture +# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.1.0 +RUN ARCH=$(uname -m) && \ + if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ + ARCH_SUFFIX="amd64"; \ + ARCH_BINARY="x86_64"; \ + elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ + ARCH_SUFFIX="arm64"; \ + ARCH_BINARY="aarch64"; \ + else \ + echo "Unsupported architecture: $ARCH" && exit 1; \ + fi && \ + if [ "${PHP_VERSION}" = "8.5" ]; then \ FRANKENPHP_VERSION="1.10.1" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ @@ -162,7 +173,7 @@ RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ && mkdir -p /usr/lib/frankenphp/modules; \ elif [ "${PHP_VERSION}" = "8.4" ]; then \ FRANKENPHP_VERSION="1.9.1" \ - && FRANKENPHP_DEB_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp_${FRANKENPHP_VERSION}-1_amd64.deb" \ + && FRANKENPHP_DEB_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp_${FRANKENPHP_VERSION}-1_${ARCH_SUFFIX}.deb" \ && curl -fsSL -L -o /tmp/frankenphp.deb "$FRANKENPHP_DEB_URL" \ && apt-get update \ && apt-get install -y /tmp/frankenphp.deb \ @@ -170,15 +181,15 @@ RUN if [ "${PHP_VERSION}" = "8.5" ]; then \ && rm -rf /var/lib/apt/lists/*; \ elif [ "${PHP_VERSION}" = "8.3" ]; then \ 
FRANKENPHP_VERSION="1.3.2" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ elif [ "${PHP_VERSION}" = "8.2" ]; then \ - FRANKENPHP_VERSION="1.0.0-rc.3" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-x86_64" \ + FRANKENPHP_VERSION="1.1.0" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 451d97025..82021b3b5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -689,6 +689,139 @@ jobs: cd tools python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 + test_php_frankenphp: + name: FrankenPHP php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} + runs-on: ubuntu-24.04${{ matrix.arch }} + container: + image: ghcr.io/aikidosec/firewall-php-test-centos-zts:${{ matrix.php_version }}-v2 + options: --privileged + needs: [ build_rpm ] + strategy: + matrix: + php_version: ['8.2', '8.3', '8.4', '8.5'] + server: ['frankenphp-worker', 'frankenphp-classic'] + arch: ['', '-arm'] + fail-fast: false + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup + run: | + uname -a + cat /etc/centos-release || cat /etc/redhat-release || echo "CentOS/Stream detected" + php -v + which frankenphp && frankenphp -v || (echo "ERROR: FrankenPHP not found!" && exit 1) + + - name: Verify ZTS is enabled + run: | + php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) + php -v + + - name: Install and start MySQL + run: | + mkdir -p /var/lib/mysql + mysqld --initialize-insecure --datadir=/var/lib/mysql + mysqld -u root --datadir=/var/lib/mysql --socket=/var/lib/mysql/mysql.sock & + sleep 10 + mysql -u root -e "CREATE DATABASE IF NOT EXISTS db;" + mysql -u root -e "ALTER USER 'root'@'localhost' IDENTIFIED BY 'pwd'; FLUSH PRIVILEGES;" + + - name: Test MySQL connection with mysqli + run: | + php -r ' + $mysqli = new mysqli("localhost", "root", "pwd", "db"); + if ($mysqli->connect_error) { + echo "MySQL connection failed: " . $mysqli->connect_error . 
"\n"; + exit(1); + } else { + echo "MySQL connection successful\n"; + $mysqli->close(); + } + ' + + - name: Get Arch + run: echo "ARCH=$(uname -m)" >> $GITHUB_ENV + + - name: Get Aikido version + run: | + AIKIDO_VERSION=$(grep '#define PHP_AIKIDO_VERSION' lib/php-extension/include/php_aikido.h | awk -F'"' '{print $2}') + echo $AIKIDO_VERSION + echo "AIKIDO_VERSION=$AIKIDO_VERSION" >> $GITHUB_ENV + echo "AIKIDO_RPM=aikido-php-firewall.${{ env.ARCH }}.rpm" >> $GITHUB_ENV + + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + pattern: | + ${{ env.AIKIDO_RPM }} + + - name: Install RPM + run: | + rpm -Uvh --oldpackage ${{ env.AIKIDO_RPM }}/${{ env.AIKIDO_RPM }} + + - name: Run ${{ matrix.server }} server tests + run: | + cd tools + python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 + + test_php_frankenphp_ubuntu: + name: Ubuntu FrankenPHP php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} + runs-on: ubuntu-24.04${{ matrix.arch }} + container: + image: ghcr.io/aikidosec/firewall-php-test-ubuntu-zts:${{ matrix.php_version }}-v2 + options: --privileged + needs: [ build_deb ] + strategy: + matrix: + php_version: ['8.2', '8.3', '8.4', '8.5'] + server: ['frankenphp-worker', 'frankenphp-classic'] + arch: ['', '-arm'] + fail-fast: false + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Get Arch + run: echo "ARCH=$(uname -m)" >> $GITHUB_ENV + + - name: Set env + run: | + AIKIDO_VERSION=$(grep '#define PHP_AIKIDO_VERSION' lib/php-extension/include/php_aikido.h | awk -F'"' '{print $2}') + echo $AIKIDO_VERSION + echo "AIKIDO_VERSION=$AIKIDO_VERSION" >> $GITHUB_ENV + echo "AIKIDO_DEB=aikido-php-firewall.${{ env.ARCH }}.deb" >> $GITHUB_ENV + + - name: Verify ZTS is enabled + run: | + php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) + php -v + + - name: Verify FrankenPHP is installed + run: | + which frankenphp && frankenphp -v || (echo "ERROR: FrankenPHP not found!" 
&& exit 1) + + - name: Start MariaDB (background) + run: | + start-mariadb & # provided by the image + sleep 5 + mysql -u root -ppwd -e "SELECT 1" || (echo "MySQL not up" && exit 1) + + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + pattern: | + ${{ env.AIKIDO_DEB }} + + - name: Install DEB + run: | + dpkg -i -E ${{ env.AIKIDO_DEB }}/${{ env.AIKIDO_DEB }} + + - name: Run ${{ matrix.server }} server tests + run: | + cd tools + python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 + test_php_qa_action_controlling_tests_apache_mod_php: name: QA apache-mod-php runs-on: ubuntu-latest diff --git a/tools/server_tests/apache/main.py b/tools/server_tests/apache/main.py index c0bc18efc..250e33ab7 100755 --- a/tools/server_tests/apache/main.py +++ b/tools/server_tests/apache/main.py @@ -295,7 +295,14 @@ def apache_mod_php_process_test(test_data): def apache_mod_php_pre_tests(): - subprocess.run([f'/usr/sbin/{apache_binary}', '-k', 'start']) + if not os.path.exists('/etc/httpd'): + # Debian/Ubuntu Apache - use apache2ctl which sources /etc/apache2/envvars + # This ensures APACHE_RUN_DIR and other variables are properly set + # apache2ctl will source envvars and then start Apache with the correct environment + subprocess.run(['/usr/sbin/apache2ctl', 'start'], check=True) + else: + # CentOS/RHEL Apache + subprocess.run([f'/usr/sbin/{apache_binary}', '-k', 'start'], check=True) def apache_mod_php_start_server(test_data, test_lib_dir, valgrind): From a55770420c490afd2f48fccd3df2aa3c75c85f71 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 18:24:13 +0200 Subject: [PATCH 073/170] Update Dockerfiles and GitHub Actions workflows to improve FrankenPHP installation and testing for PHP versions 8.2 to 8.5, including architecture detection and Apache configuration adjustments. 
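Note on the architecture detection added below: the Dockerfiles normalize the output of `uname -m` to the suffixes used by FrankenPHP release assets (.rpm packages and standalone binaries are published as x86_64/aarch64, .deb packages as amd64/arm64). A minimal, illustrative sketch of that mapping — the actual Dockerfiles use an if/elif chain rather than case:

    ARCH=$(uname -m)
    case "$ARCH" in
      x86_64|amd64)   ARCH_SUFFIX="amd64";  ARCH_BINARY="x86_64" ;;
      aarch64|arm64)  ARCH_SUFFIX="arm64";  ARCH_BINARY="aarch64" ;;
      *) echo "Unsupported architecture: $ARCH" >&2; exit 1 ;;
    esac
    # ARCH_SUFFIX feeds the .deb asset name, ARCH_BINARY the standalone binary name
    echo "deb suffix: $ARCH_SUFFIX, binary suffix: $ARCH_BINARY"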
--- .../workflows/Dockerfile.centos-php-test-zts | 11 +++-- .../workflows/Dockerfile.ubuntu-php-test-nts | 43 ++++++++++--------- .../workflows/Dockerfile.ubuntu-php-test-zts | 11 +++-- .github/workflows/build.yml | 2 +- 4 files changed, 38 insertions(+), 29 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index eb5892330..56afc779d 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -162,6 +162,7 @@ RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/ph # Install FrankenPHP binary based on PHP version and architecture # PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.1.0 RUN ARCH=$(uname -m) && \ + PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ ARCH_SUFFIX="x86_64"; \ elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ @@ -169,7 +170,7 @@ RUN ARCH=$(uname -m) && \ else \ echo "Unsupported architecture: $ARCH" && exit 1; \ fi && \ - if [ "${PHP_VERSION}" = "8.5" ]; then \ + if [ "$PHP_VER" = "8.5" ]; then \ FRANKENPHP_VERSION="1.10.1" \ && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ @@ -177,13 +178,13 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "${PHP_VERSION}" = "8.4" ]; then \ + elif [ "$PHP_VER" = "8.4" ]; then \ FRANKENPHP_VERSION="1.9.1" \ && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-0.0.0-1.${ARCH_SUFFIX}.rpm" \ && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ && yum install -y /tmp/frankenphp.rpm \ && rm -f /tmp/frankenphp.rpm; \ - elif [ "${PHP_VERSION}" = "8.3" ]; then \ + elif [ "$PHP_VER" = "8.3" ]; then \ FRANKENPHP_VERSION="1.3.2" \ && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ @@ -191,7 +192,7 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "${PHP_VERSION}" = "8.2" ]; then \ + elif [ "$PHP_VER" = "8.2" ]; then \ FRANKENPHP_VERSION="1.1.0" \ && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ @@ -199,6 +200,8 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ + else \ + echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" && exit 1; \ fi # Create FrankenPHP folder structure (for binary installations, RPM creates these automatically) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 6606ad4ad..4d40ce77d 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -65,7 +65,7 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ RUN mkdir -p /usr/local/etc/php/conf.d 
-# Apache: switch to prefork for mod_php scenario and enable rewrite +# Apache: switch to prefork for mod_php scenario and enable rewrite(needed by php build) RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ a2enmod mpm_prefork rewrite cgi cgid || true @@ -99,6 +99,12 @@ COPY --from=php-build /usr/local /usr/local RUN mkdir -p /usr/lib/apache2/modules COPY --from=php-build /usr/lib/apache2/modules/ /usr/lib/apache2/modules/ +# Configure Apache to use prefork MPM (non-threaded) for NTS PHP +# This must be done in the final stage since the base image has default MPM settings +RUN a2dismod mpm_event || true && \ + a2dismod mpm_worker || true && \ + a2enmod mpm_prefork || true + RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ echo "Error: Could not determine extension_dir"; \ @@ -164,27 +170,26 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ # Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Configure Apache to load libphp.so module (if it exists) +# Configure Apache to load libphp module (if it exists) # PHP 7 uses libphp7.so, other versions use libphp.so -RUN PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ - if [ "$PHP_MAJOR" = "7" ]; then \ +# Check for both possible filenames to handle any case +RUN if [ -f "/usr/lib/apache2/modules/libphp7.so" ]; then \ LIBPHP_NAME="libphp7.so"; \ - else \ + MODULE_NAME="php7_module"; \ + elif [ -f "/usr/lib/apache2/modules/libphp.so" ]; then \ LIBPHP_NAME="libphp.so"; \ + MODULE_NAME="php_module"; \ + else \ + echo "Warning: No libphp module found in /usr/lib/apache2/modules/" && \ + ls -la /usr/lib/apache2/modules/ || true && \ + exit 0; \ fi && \ - if [ -f "/usr/lib/apache2/modules/${LIBPHP_NAME}" ]; then \ + if [ -n "$LIBPHP_NAME" ]; then \ echo "# Load PHP module for Apache" > /etc/apache2/conf-available/php.conf && \ - if [ "$PHP_MAJOR" = "7" ]; then \ - echo "LoadModule php7_module /usr/lib/apache2/modules/${LIBPHP_NAME}" >> /etc/apache2/conf-available/php.conf && \ - echo "" >> /etc/apache2/conf-available/php.conf && \ - echo "# Configure PHP file handling" >> /etc/apache2/conf-available/php.conf && \ - echo "" >> /etc/apache2/conf-available/php.conf; \ - else \ - echo "LoadModule php_module /usr/lib/apache2/modules/${LIBPHP_NAME}" >> /etc/apache2/conf-available/php.conf && \ - echo "" >> /etc/apache2/conf-available/php.conf && \ - echo "# Configure PHP file handling" >> /etc/apache2/conf-available/php.conf && \ - echo "" >> /etc/apache2/conf-available/php.conf; \ - fi && \ + echo "LoadModule ${MODULE_NAME} /usr/lib/apache2/modules/${LIBPHP_NAME}" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ + echo "# Configure PHP file handling" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ echo " PHPIniDir /usr/local/etc/php" >> /etc/apache2/conf-available/php.conf && \ echo " " >> /etc/apache2/conf-available/php.conf && \ echo " SetHandler application/x-httpd-php" >> /etc/apache2/conf-available/php.conf && \ @@ -192,9 +197,7 @@ RUN PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ echo " DirectoryIndex index.php index.html" >> /etc/apache2/conf-available/php.conf && \ echo "" >> /etc/apache2/conf-available/php.conf && \ a2enconf php >/dev/null 2>&1 || true && \ - echo "Created Apache PHP module configuration with 
${LIBPHP_NAME}"; \ - else \ - echo "Warning: ${LIBPHP_NAME} not found, skipping Apache PHP module configuration"; \ + echo "Created Apache PHP module configuration with ${LIBPHP_NAME} (${MODULE_NAME})"; \ fi # ---- Python toolchain used by tests ---- diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 43a3873ae..c9b4cbd64 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -154,6 +154,7 @@ RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/ph # Install FrankenPHP binary based on PHP version and architecture # PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.1.0 RUN ARCH=$(uname -m) && \ + PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ ARCH_SUFFIX="amd64"; \ ARCH_BINARY="x86_64"; \ @@ -163,7 +164,7 @@ RUN ARCH=$(uname -m) && \ else \ echo "Unsupported architecture: $ARCH" && exit 1; \ fi && \ - if [ "${PHP_VERSION}" = "8.5" ]; then \ + if [ "$PHP_VER" = "8.5" ]; then \ FRANKENPHP_VERSION="1.10.1" \ && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ @@ -171,7 +172,7 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "${PHP_VERSION}" = "8.4" ]; then \ + elif [ "$PHP_VER" = "8.4" ]; then \ FRANKENPHP_VERSION="1.9.1" \ && FRANKENPHP_DEB_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp_${FRANKENPHP_VERSION}-1_${ARCH_SUFFIX}.deb" \ && curl -fsSL -L -o /tmp/frankenphp.deb "$FRANKENPHP_DEB_URL" \ @@ -179,7 +180,7 @@ RUN ARCH=$(uname -m) && \ && apt-get install -y /tmp/frankenphp.deb \ && rm -f /tmp/frankenphp.deb \ && rm -rf /var/lib/apt/lists/*; \ - elif [ "${PHP_VERSION}" = "8.3" ]; then \ + elif [ "$PHP_VER" = "8.3" ]; then \ FRANKENPHP_VERSION="1.3.2" \ && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ @@ -187,7 +188,7 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "${PHP_VERSION}" = "8.2" ]; then \ + elif [ "$PHP_VER" = "8.2" ]; then \ FRANKENPHP_VERSION="1.1.0" \ && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ @@ -195,6 +196,8 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ + else \ + echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" && exit 1; \ fi # Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 82021b3b5..b7aa88be5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -690,7 +690,7 @@ jobs: python3 run_server_tests.py ../tests/server ../tests/testlib --server=${{ matrix.server }} --max-runs=3 test_php_frankenphp: - name: FrankenPHP php-${{ 
matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} + name: CentOS FrankenPHP php-${{ matrix.php_version }} ${{ matrix.server }} ${{ matrix.arch == '' && 'x86_64' || 'arm' }} runs-on: ubuntu-24.04${{ matrix.arch }} container: image: ghcr.io/aikidosec/firewall-php-test-centos-zts:${{ matrix.php_version }}-v2 From 9173a750e09af01f73eb34f22c40194fd6f22ec7 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 18:26:56 +0200 Subject: [PATCH 074/170] Dont fail the images build if PHP < 8.2 --- .github/workflows/Dockerfile.centos-php-test-zts | 2 +- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 56afc779d..7e0b2a466 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -201,7 +201,7 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ else \ - echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" && exit 1; \ + echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" \ fi # Create FrankenPHP folder structure (for binary installations, RPM creates these automatically) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index c9b4cbd64..6b2bbe227 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -197,7 +197,7 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ else \ - echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" && exit 1; \ + echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" \ fi # Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) From 04f5eea4db88053a37cda71bb7fd8940c44e171c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 18:32:13 +0200 Subject: [PATCH 075/170] Fix syntax in error message for unsupported PHP versions in CentOS and Ubuntu Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-zts | 2 +- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 7e0b2a466..9973e6bc8 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -201,7 +201,7 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ else \ - echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" \ + echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi # Create FrankenPHP folder structure (for binary installations, RPM creates these automatically) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 6b2bbe227..75fdfed73 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -197,7 +197,7 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ else \ - echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)" \ + echo "Unsupported 
PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi # Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) From 82b10c3e608e25aebf6a87ceeb93ce03c9bb7523 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 18:38:42 +0200 Subject: [PATCH 076/170] Refactor Dockerfile syntax for improved readability in CentOS and Ubuntu workflows --- .github/workflows/Dockerfile.centos-php-test-zts | 4 ++-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 9973e6bc8..94d08d4b5 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -199,8 +199,8 @@ RUN ARCH=$(uname -m) && \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ - else \ + && mkdir -p /usr/lib/frankenphp/modules; + else echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 75fdfed73..af3162022 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -195,14 +195,14 @@ RUN ARCH=$(uname -m) && \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ - else \ + && mkdir -p /usr/lib/frankenphp/modules; + else echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi # Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) RUN mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /etc/frankenphp/php.d \git && mkdir -p /usr/lib/frankenphp/modules # Install web servers and database (without PHP packages) From 75654eb18c176efc6ac578bf0fe0c46c431f6dd1 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 18:40:27 +0200 Subject: [PATCH 077/170] . 
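Context for this revert: inside a single multi-line RUN instruction, every line of the embedded shell if/else except the last must end with a trailing backslash — including the line carrying `else` — otherwise Docker parses the following line as a new instruction and the build fails. A minimal illustrative example (the `/tmp/marker` path is just a placeholder):

    RUN if [ -f /tmp/marker ]; then \
            echo "marker present"; \
        else \
            echo "marker missing"; \
        fi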
--- .github/workflows/Dockerfile.centos-php-test-zts | 4 ++-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 94d08d4b5..9973e6bc8 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -199,8 +199,8 @@ RUN ARCH=$(uname -m) && \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; - else + && mkdir -p /usr/lib/frankenphp/modules; \ + else \ echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index af3162022..f289c5653 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -195,8 +195,8 @@ RUN ARCH=$(uname -m) && \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; - else + && mkdir -p /usr/lib/frankenphp/modules; \ + else \ echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi From c79e2ab56706d868c398427cc83a779742fc0dab Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 18:42:47 +0200 Subject: [PATCH 078/170] removed extra keyword --- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index f289c5653..b2f3d1238 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -202,7 +202,7 @@ RUN ARCH=$(uname -m) && \ # Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) RUN mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \git + && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules # Install web servers and database (without PHP packages) From 9c662286ab8fedf6618e324f11e408ffbc80222a Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 17:30:17 +0000 Subject: [PATCH 079/170] Update GitHub Actions workflows to use 'command -v' for FrankenPHP checks and enhance Apache configuration in Dockerfiles for prefork MPM and module loading. 
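Note: `command -v` is a POSIX shell builtin, so the availability check keeps working even when the external `which` binary is not present in the container image. Sketch of the check pattern used in the workflow steps below:

    command -v frankenphp && frankenphp -v \
      || (echo "ERROR: FrankenPHP not found!" && exit 1)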
--- .github/workflows/Dockerfile.ubuntu-php-test-nts | 7 ++++++- .github/workflows/Dockerfile.ubuntu-php-test-zts | 10 ++++++++++ .github/workflows/build.yml | 4 ++-- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-nts b/.github/workflows/Dockerfile.ubuntu-php-test-nts index 4d40ce77d..c775b4ee5 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-nts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-nts @@ -101,9 +101,14 @@ COPY --from=php-build /usr/lib/apache2/modules/ /usr/lib/apache2/modules/ # Configure Apache to use prefork MPM (non-threaded) for NTS PHP # This must be done in the final stage since the base image has default MPM settings +RUN service apache2 stop || true + +# Apache: switch to prefork for mod_php scenario and enable rewrite RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ - a2enmod mpm_prefork || true + a2enmod mpm_prefork rewrite cgi cgid || true + +RUN service apache2 start || true RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index b2f3d1238..51f9fb360 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -60,6 +60,11 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ RUN mkdir -p /usr/local/etc/php/conf.d +# Apache: switch to prefork for mod_php scenario and enable rewrite +RUN a2dismod mpm_event || true && \ + a2dismod mpm_worker || true && \ + a2enmod mpm_prefork rewrite cgi cgid || true + # Build PHP with ZTS enabled RUN ./configure \ --prefix=/usr/local \ @@ -214,11 +219,16 @@ RUN apt-get update && \ apache2-bin \ && rm -rf /var/lib/apt/lists/* + +RUN service apache2 stop || true + # Apache: switch to prefork for mod_php scenario and enable rewrite RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ a2enmod mpm_prefork rewrite cgi cgid || true +RUN service apache2 start || true + # ---- Python toolchain used by tests ---- ENV PIP_DISABLE_PIP_VERSION_CHECK=1 \ PYTHONDONTWRITEBYTECODE=1 \ diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b7aa88be5..83ea6cfa3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -711,7 +711,7 @@ jobs: uname -a cat /etc/centos-release || cat /etc/redhat-release || echo "CentOS/Stream detected" php -v - which frankenphp && frankenphp -v || (echo "ERROR: FrankenPHP not found!" && exit 1) + command -v frankenphp && frankenphp -v || (echo "ERROR: FrankenPHP not found!" && exit 1) - name: Verify ZTS is enabled run: | @@ -799,7 +799,7 @@ jobs: - name: Verify FrankenPHP is installed run: | - which frankenphp && frankenphp -v || (echo "ERROR: FrankenPHP not found!" && exit 1) + command -v frankenphp && frankenphp -v || (echo "ERROR: FrankenPHP not found!" && exit 1) - name: Start MariaDB (background) run: | From aaa63344abfd3e8aa67a59ba0d90e6899d9d3c57 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 11 Dec 2025 17:57:10 +0000 Subject: [PATCH 080/170] Don't test anymore franken with PHP 8.2. 
is too old, with a lot of bugs(nobody uses it anymore) --- .github/workflows/Dockerfile.centos-php-test-zts | 10 +--------- .github/workflows/Dockerfile.ubuntu-php-test-zts | 10 +--------- .github/workflows/build.yml | 4 ++-- 3 files changed, 4 insertions(+), 20 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 9973e6bc8..79cece337 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -172,7 +172,7 @@ RUN ARCH=$(uname -m) && \ fi && \ if [ "$PHP_VER" = "8.5" ]; then \ FRANKENPHP_VERSION="1.10.1" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}-gnu" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ @@ -192,14 +192,6 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "$PHP_VER" = "8.2" ]; then \ - FRANKENPHP_VERSION="1.1.0" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ - && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ - && chmod +x /usr/local/bin/frankenphp \ - && mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ else \ echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 51f9fb360..9fde5e460 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -171,7 +171,7 @@ RUN ARCH=$(uname -m) && \ fi && \ if [ "$PHP_VER" = "8.5" ]; then \ FRANKENPHP_VERSION="1.10.1" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ + && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}-gnu" \ && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ && chmod +x /usr/local/bin/frankenphp \ && mkdir -p /etc/frankenphp/caddy.d \ @@ -193,14 +193,6 @@ RUN ARCH=$(uname -m) && \ && mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "$PHP_VER" = "8.2" ]; then \ - FRANKENPHP_VERSION="1.1.0" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ - && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ - && chmod +x /usr/local/bin/frankenphp \ - && mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ else \ echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ fi diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 83ea6cfa3..2a8b78358 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -698,7 +698,7 @@ jobs: needs: [ build_rpm ] strategy: matrix: - php_version: ['8.2', '8.3', 
'8.4', '8.5'] + php_version: ['8.3', '8.4', '8.5'] server: ['frankenphp-worker', 'frankenphp-classic'] arch: ['', '-arm'] fail-fast: false @@ -774,7 +774,7 @@ jobs: needs: [ build_deb ] strategy: matrix: - php_version: ['8.2', '8.3', '8.4', '8.5'] + php_version: ['8.3', '8.4', '8.5'] server: ['frankenphp-worker', 'frankenphp-classic'] arch: ['', '-arm'] fail-fast: false From cc2a5a421a903de4d74c89561f630b3998cf707d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 02:11:45 +0200 Subject: [PATCH 081/170] Update GitHub Actions workflows and Dockerfiles to include PHP 8.2 support, enhance FrankenPHP binary extraction, and improve thread configuration in test scripts. --- .../workflows/Dockerfile.centos-php-test-zts | 56 ++++++++----------- .../workflows/Dockerfile.ubuntu-php-test-zts | 55 +++++------------- .github/workflows/build.yml | 4 +- tools/server_tests/frankenphp_classic/main.py | 2 +- tools/server_tests/frankenphp_worker/main.py | 27 ++++++++- 5 files changed, 64 insertions(+), 80 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 79cece337..c9f99944a 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -92,8 +92,22 @@ RUN ./configure \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true # Final image with PHP and test infrastructure +# Stage to extract FrankenPHP binary from official Docker image +# Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 +ARG PHP_VERSION=8.3 +ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} + +FROM ${FRANKENPHP_IMAGE} AS frankenphp-source + FROM base AS final COPY --from=php-build /usr/local /usr/local +# Copy FrankenPHP binary from official image +# The binary is typically at /usr/local/bin/frankenphp in the official images +COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp +RUN chmod +x /usr/local/bin/frankenphp && \ + mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ @@ -159,42 +173,16 @@ RUN mkdir -p /etc/php-fpm.d && \ # Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Install FrankenPHP binary based on PHP version and architecture -# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.1.0 -RUN ARCH=$(uname -m) && \ - PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ - if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ - ARCH_SUFFIX="x86_64"; \ - elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ - ARCH_SUFFIX="aarch64"; \ +# Verify FrankenPHP binary was copied and create directories +RUN if [ -f /usr/local/bin/frankenphp ]; then \ + chmod +x /usr/local/bin/frankenphp && \ + frankenphp -v || echo "Warning: frankenphp version check failed"; \ else \ - echo "Unsupported architecture: $ARCH" && exit 1; \ + echo "ERROR: frankenphp binary not found!" 
&& exit 1; \ fi && \ - if [ "$PHP_VER" = "8.5" ]; then \ - FRANKENPHP_VERSION="1.10.1" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}-gnu" \ - && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ - && chmod +x /usr/local/bin/frankenphp \ - && mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "$PHP_VER" = "8.4" ]; then \ - FRANKENPHP_VERSION="1.9.1" \ - && FRANKENPHP_RPM_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-0.0.0-1.${ARCH_SUFFIX}.rpm" \ - && curl -fsSL -L -o /tmp/frankenphp.rpm "$FRANKENPHP_RPM_URL" \ - && yum install -y /tmp/frankenphp.rpm \ - && rm -f /tmp/frankenphp.rpm; \ - elif [ "$PHP_VER" = "8.3" ]; then \ - FRANKENPHP_VERSION="1.3.2" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_SUFFIX}" \ - && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ - && chmod +x /usr/local/bin/frankenphp \ - && mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ - else \ - echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ - fi + mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules # Create FrankenPHP folder structure (for binary installations, RPM creates these automatically) RUN mkdir -p /etc/frankenphp/caddy.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 9fde5e460..9f1c148ad 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -86,9 +86,23 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true +# Stage to extract FrankenPHP binary from official Docker image +# Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 +ARG PHP_VERSION=8.3 +ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} + +FROM ${FRANKENPHP_IMAGE} AS frankenphp-source + # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local +# Copy FrankenPHP binary from official image +# The binary is at /usr/local/bin/frankenphp in the official images +COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp +RUN chmod +x /usr/local/bin/frankenphp && \ + mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ @@ -156,47 +170,6 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ # Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Install FrankenPHP binary based on PHP version and architecture -# PHP 8.5 = v1.10.1, PHP 8.4 = v1.9.1, PHP 8.3 = v1.3.2, PHP 8.2 = v1.1.0 -RUN ARCH=$(uname -m) && \ - PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ - if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ - ARCH_SUFFIX="amd64"; \ - ARCH_BINARY="x86_64"; \ - elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ - ARCH_SUFFIX="arm64"; \ - ARCH_BINARY="aarch64"; \ - else \ - echo 
"Unsupported architecture: $ARCH" && exit 1; \ - fi && \ - if [ "$PHP_VER" = "8.5" ]; then \ - FRANKENPHP_VERSION="1.10.1" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}-gnu" \ - && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ - && chmod +x /usr/local/bin/frankenphp \ - && mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ - elif [ "$PHP_VER" = "8.4" ]; then \ - FRANKENPHP_VERSION="1.9.1" \ - && FRANKENPHP_DEB_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp_${FRANKENPHP_VERSION}-1_${ARCH_SUFFIX}.deb" \ - && curl -fsSL -L -o /tmp/frankenphp.deb "$FRANKENPHP_DEB_URL" \ - && apt-get update \ - && apt-get install -y /tmp/frankenphp.deb \ - && rm -f /tmp/frankenphp.deb \ - && rm -rf /var/lib/apt/lists/*; \ - elif [ "$PHP_VER" = "8.3" ]; then \ - FRANKENPHP_VERSION="1.3.2" \ - && FRANKENPHP_BINARY_URL="https://github.com/php/frankenphp/releases/download/v${FRANKENPHP_VERSION}/frankenphp-linux-${ARCH_BINARY}" \ - && curl -fsSL -L -o /usr/local/bin/frankenphp "$FRANKENPHP_BINARY_URL" \ - && chmod +x /usr/local/bin/frankenphp \ - && mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ - else \ - echo "Unsupported PHP version: $PHP_VER (supported: 8.2, 8.3, 8.4, 8.5)"; \ - fi - # Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) RUN mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2a8b78358..83ea6cfa3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -698,7 +698,7 @@ jobs: needs: [ build_rpm ] strategy: matrix: - php_version: ['8.3', '8.4', '8.5'] + php_version: ['8.2', '8.3', '8.4', '8.5'] server: ['frankenphp-worker', 'frankenphp-classic'] arch: ['', '-arm'] fail-fast: false @@ -774,7 +774,7 @@ jobs: needs: [ build_deb ] strategy: matrix: - php_version: ['8.3', '8.4', '8.5'] + php_version: ['8.2', '8.3', '8.4', '8.5'] server: ['frankenphp-worker', 'frankenphp-classic'] arch: ['', '-arm'] fail-fast: false diff --git a/tools/server_tests/frankenphp_classic/main.py b/tools/server_tests/frankenphp_classic/main.py index 7c9c66b11..63c9550e0 100644 --- a/tools/server_tests/frankenphp_classic/main.py +++ b/tools/server_tests/frankenphp_classic/main.py @@ -58,7 +58,7 @@ def frankenphp_classic_pre_tests(tests_data): threads = total_workers * 2 with open(caddyfile_path, 'w') as f: - f.write(caddyfile_base_template.format(num_threads=threads, max_threads=threads)) + f.write(caddyfile_base_template.format(num_threads=threads, max_threads=threads*2)) for test_data in tests_data: f.write("\n" + test_data["site_block"]) diff --git a/tools/server_tests/frankenphp_worker/main.py b/tools/server_tests/frankenphp_worker/main.py index a59ed8574..bf0aae306 100644 --- a/tools/server_tests/frankenphp_worker/main.py +++ b/tools/server_tests/frankenphp_worker/main.py @@ -10,7 +10,28 @@ num_workers = 2 -caddyfile_base_template = """{{ +def get_php_version(): + """Get PHP version as a tuple (major, minor)""" + try: + result = subprocess.run(['php', '-r', 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;'], + capture_output=True, text=True, check=True) + version_str = result.stdout.strip() + major, minor = version_str.split('.') + return (int(major), int(minor)) + except: + 
return (8, 3) # Default to newer version + +def get_caddyfile_base_template(): + """Get the appropriate caddyfile template based on PHP version""" + php_version = get_php_version() + + # FrankenPHP 1.1.0 (PHP 8.2) doesn't support the {{ global options block + if php_version == (8, 2): + # Use a simpler format without global options block + return "" + else: + # Newer versions support the global options block + return """{{ frankenphp {{ num_threads {num_threads} max_threads {max_threads} @@ -110,7 +131,9 @@ def frankenphp_worker_pre_tests(tests_data): threads = total_workers * 3 with open(caddyfile_path, 'w') as f: - f.write(caddyfile_base_template.format(num_threads=threads, max_threads=threads)) + base_template = get_caddyfile_base_template() + if base_template: + f.write(base_template.format(num_threads=threads, max_threads=threads*2)) for test_data in tests_data: f.write("\n" + test_data["site_block"]) From a99b81270e92beb1d43677e795ca46efd937a597 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 02:27:30 +0200 Subject: [PATCH 082/170] Remove hardcoded PHP version in Dockerfiles for CentOS and Ubuntu workflows to allow dynamic versioning. --- .github/workflows/Dockerfile.centos-php-test-zts | 3 +-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index c9f99944a..513c1c14a 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -91,10 +91,9 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true -# Final image with PHP and test infrastructure # Stage to extract FrankenPHP binary from official Docker image # Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 -ARG PHP_VERSION=8.3 +ARG PHP_VERSION ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} FROM ${FRANKENPHP_IMAGE} AS frankenphp-source diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 9f1c148ad..8a8d32f0e 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -88,7 +88,7 @@ RUN ./configure \ # Stage to extract FrankenPHP binary from official Docker image # Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 -ARG PHP_VERSION=8.3 +ARG PHP_VERSION ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} FROM ${FRANKENPHP_IMAGE} AS frankenphp-source From cc521c468f52828454fe083fff69deafdaa69f1c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 02:32:31 +0200 Subject: [PATCH 083/170] Add FrankenPHP image argument to Dockerfiles for CentOS and Ubuntu workflows --- .github/workflows/Dockerfile.centos-php-test-zts | 3 +-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 513c1c14a..5b196152f 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -5,6 +5,7 @@ ARG BASE_IMAGE=quay.io/centos/centos:stream9 ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} +ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} FROM ${BASE_IMAGE} AS base SHELL ["/bin/bash", "-euo", "pipefail", "-c"] @@ -93,8 +94,6 @@ RUN ./configure \ # Stage to 
extract FrankenPHP binary from official Docker image # Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 -ARG PHP_VERSION -ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} FROM ${FRANKENPHP_IMAGE} AS frankenphp-source diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 8a8d32f0e..e3d2ec708 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -5,6 +5,7 @@ ARG DEBIAN_FRONTEND=noninteractive ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} +ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} FROM ubuntu:24.04 AS base SHELL ["/bin/bash", "-eo", "pipefail", "-c"] @@ -88,8 +89,6 @@ RUN ./configure \ # Stage to extract FrankenPHP binary from official Docker image # Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 -ARG PHP_VERSION -ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} FROM ${FRANKENPHP_IMAGE} AS frankenphp-source From 206dbdae877a3604a4b304a6ed12687bbc2a1d65 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 03:13:31 +0200 Subject: [PATCH 084/170] . --- .../workflows/Dockerfile.centos-php-test-zts | 48 ++++++++----------- .../workflows/Dockerfile.ubuntu-php-test-zts | 39 +++++++++------ 2 files changed, 46 insertions(+), 41 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 5b196152f..a21eb6511 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -94,19 +94,30 @@ RUN ./configure \ # Stage to extract FrankenPHP binary from official Docker image # Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 - FROM ${FRANKENPHP_IMAGE} AS frankenphp-source FROM base AS final + +ARG PHP_VERSION +RUN case "$PHP_VERSION" in \ + 8.2|8.3|8.4|8.5) \ + echo "Using FrankenPHP Docker image for PHP_VERSION=${PHP_VERSION}"; \ + ;; \ + *) \ + echo "WARNING: FrankenPHP Docker images are officially only for PHP >= 8.2 (got PHP_VERSION=${PHP_VERSION}). 
Build will continue."; \ + ;; \ + esac + COPY --from=php-build /usr/local /usr/local -# Copy FrankenPHP binary from official image -# The binary is typically at /usr/local/bin/frankenphp in the official images -COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp -RUN chmod +x /usr/local/bin/frankenphp && \ - mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules +RUN if [ -f /usr/local/bin/frankenphp ]; then \ + chmod +x /usr/local/bin/frankenphp && \ + mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ + else \ + echo "WARNING: frankenphp binary not found in source image; skipping FrankenPHP setup in this step."; \ + fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ @@ -167,25 +178,13 @@ RUN mkdir -p /etc/php-fpm.d && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true -# Configure MySQL socket path for mysqli (so "localhost" connections work) -# Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Verify FrankenPHP binary was copied and create directories RUN if [ -f /usr/local/bin/frankenphp ]; then \ - chmod +x /usr/local/bin/frankenphp && \ frankenphp -v || echo "Warning: frankenphp version check failed"; \ else \ - echo "ERROR: frankenphp binary not found!" && exit 1; \ - fi && \ - mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules - -# Create FrankenPHP folder structure (for binary installations, RPM creates these automatically) -RUN mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules + echo "WARNING: frankenphp binary not found; FrankenPHP-specific tests will be skipped."; \ + fi # Python deps used by test harness RUN python3 -m pip install --no-cache-dir --upgrade pip && \ @@ -194,8 +193,3 @@ RUN python3 -m pip install --no-cache-dir --upgrade pip && \ # Quality-of-life WORKDIR /work CMD ["bash"] - - - - - diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index e3d2ec708..a648ce14d 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -89,19 +89,31 @@ RUN ./configure \ # Stage to extract FrankenPHP binary from official Docker image # Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 - FROM ${FRANKENPHP_IMAGE} AS frankenphp-source # Final image with PHP and test infrastructure FROM base AS final + +ARG PHP_VERSION +RUN case "$PHP_VERSION" in \ + 8.2|8.3|8.4|8.5) \ + echo "Using FrankenPHP Docker image for PHP_VERSION=${PHP_VERSION}"; \ + ;; \ + *) \ + echo "WARNING: FrankenPHP Docker images are officially only for PHP >= 8.2 (got PHP_VERSION=${PHP_VERSION}). 
Build will continue."; \ + ;; \ + esac + COPY --from=php-build /usr/local /usr/local -# Copy FrankenPHP binary from official image -# The binary is at /usr/local/bin/frankenphp in the official images -COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp -RUN chmod +x /usr/local/bin/frankenphp && \ - mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules + +RUN if [ -f /usr/local/bin/frankenphp ]; then \ + chmod +x /usr/local/bin/frankenphp && \ + mkdir -p /etc/frankenphp/caddy.d \ + && mkdir -p /etc/frankenphp/php.d \ + && mkdir -p /usr/lib/frankenphp/modules; \ + else \ + echo "WARNING: frankenphp binary not found in source image; skipping FrankenPHP setup in this step."; \ + fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ @@ -165,14 +177,13 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true -# Configure MySQL socket path for mysqli (so "localhost" connections work) -# Note: /usr/local/etc/php/conf.d was created before PHP build RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini -# Create FrankenPHP folder structure (for binary installations, DEB creates these automatically) -RUN mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules +RUN if [ -f /usr/local/bin/frankenphp ]; then \ + frankenphp -v || echo "Warning: frankenphp version check failed"; \ + else \ + echo "WARNING: frankenphp binary not found; FrankenPHP-specific tests will be skipped."; \ + fi # Install web servers and database (without PHP packages) RUN apt-get update && \ From 2090e77e606369d4260db5aabba703f2d016f50b Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 03:23:28 +0200 Subject: [PATCH 085/170] Add FrankenPHP binary copy step to Dockerfiles for CentOS and Ubuntu workflows --- .github/workflows/Dockerfile.centos-php-test-zts | 1 + .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index a21eb6511..4f8295623 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -110,6 +110,7 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local +COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp RUN if [ -f /usr/local/bin/frankenphp ]; then \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index a648ce14d..c2b3cb250 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -106,6 +106,7 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local +COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp RUN if [ -f /usr/local/bin/frankenphp ]; then \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ From 2f27dcbdaa48ea22c8e166135bd3e81b45546182 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 03:25:35 +0200 Subject: [PATCH 086/170] . 
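The preceding patches replace the per-version FrankenPHP download matrix with a multi-stage pattern: a stage named frankenphp-source is created from the official dunglas/frankenphp image for the requested PHP version, and the final stage copies only the frankenphp binary out of it. The one Dockerfile subtlety they have to respect is ARG scoping: an ARG declared before the first FROM is visible to FROM lines but not inside build stages, so a stage that wants to read it in a RUN step must re-declare it. A minimal sketch of both points, using the same image naming as the diffs (the echo line is purely illustrative):

    ARG PHP_VERSION=8.3
    ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION}

    FROM ${FRANKENPHP_IMAGE} AS frankenphp-source

    FROM ubuntu:24.04 AS final
    # Re-declare without a value to inherit the global default (or the build-arg).
    ARG PHP_VERSION
    RUN echo "building test image for PHP ${PHP_VERSION}"
    COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp

Later in the series the FRANKENPHP_IMAGE default becomes scratch and the workflow passes the real image as a build argument, which keeps the same stage wiring usable even for PHP versions without an official FrankenPHP image.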
--- .devcontainer/centos_php_test_nts/Dockerfile | 243 ++++++++++-------- .../workflows/Dockerfile.centos-php-test-zts | 7 +- 2 files changed, 132 insertions(+), 118 deletions(-) diff --git a/.devcontainer/centos_php_test_nts/Dockerfile b/.devcontainer/centos_php_test_nts/Dockerfile index ae217015c..90d9d51ef 100644 --- a/.devcontainer/centos_php_test_nts/Dockerfile +++ b/.devcontainer/centos_php_test_nts/Dockerfile @@ -1,84 +1,42 @@ # syntax=docker/dockerfile:1.7 -# CentOS Stream 9 test image with PHP built from source in NTS mode +# Ubuntu test image with PHP built from source in NTS mode # Used for testing the extension with standard PHP (non-thread-safe) -ARG BASE_IMAGE=quay.io/centos/centos:stream9 +ARG DEBIAN_FRONTEND=noninteractive ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -FROM ${BASE_IMAGE} AS base -SHELL ["/bin/bash", "-euo", "pipefail", "-c"] +FROM ubuntu:24.04 AS base +SHELL ["/bin/bash", "-eo", "pipefail", "-c"] -ENV TZ=Etc/UTC \ +ENV DEBIAN_FRONTEND=noninteractive \ + TZ=Etc/UTC \ LC_ALL=C.UTF-8 \ LANG=C.UTF-8 \ + LANGUAGE=C.UTF-8 \ PHP_VERSION=${PHP_VERSION} -RUN yum install -y yum-utils && \ - dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true - -# Install minimal tools needed for re2c build (replace curl-minimal with full curl) -RUN yum install -y xz tar gcc gcc-c++ make - -ENV RE2C_VERSION=3.1 -RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ - && mkdir -p /tmp/re2c-src \ - && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ - && cd /tmp/re2c-src \ - && ./configure \ - && make -j"$(nproc)" \ - && make install \ - && rm -rf /tmp/re2c-src /tmp/re2c.tar.xz - -# Install remaining build dependencies and tools -RUN yum install -y autoconf bison pkgconfig \ - libxml2-devel sqlite-devel libcurl-devel openssl-devel \ - libzip-devel oniguruma-devel libjpeg-turbo-devel libpng-devel libwebp-devel \ - libicu-devel readline-devel libxslt-devel \ - git wget \ - python3 python3-devel python3-pip \ - nginx httpd httpd-devel procps-ng mysql-server \ - cpio unzip nano lsof jq rpmdevtools sudo \ - && yum clean all - -# Install mariadb-devel separately (may need different repo or skip if not critical) -RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" - -# Install Go toolchain (architecture-aware) -RUN ARCH=$(uname -m) && \ - if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ - curl -O https://dl.google.com/go/go1.23.3.linux-amd64.tar.gz && \ - tar -C /usr/local -xzf go1.23.3.linux-amd64.tar.gz && \ - rm -f go1.23.3.linux-amd64.tar.gz; \ - elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ - curl -O https://dl.google.com/go/go1.23.3.linux-arm64.tar.gz && \ - tar -C /usr/local -xzf go1.23.3.linux-arm64.tar.gz && \ - rm -f go1.23.3.linux-arm64.tar.gz; \ - else \ - echo "Unsupported architecture: $ARCH" && exit 1; \ - fi -ENV PATH="/usr/local/go/bin:${PATH}" - -# Install protoc and Go protobuf plugins -RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@latest \ - && go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest -RUN ARCH=$(uname -m) && \ - if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ - PROTOC_ZIP=protoc-28.3-linux-x86_64.zip && \ - curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v28.3/$PROTOC_ZIP && \ - unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ - unzip -o 
$PROTOC_ZIP -d /usr/local include/* && \ - rm -f $PROTOC_ZIP; \ - elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ - PROTOC_ZIP=protoc-28.3-linux-aarch_64.zip && \ - curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v28.3/$PROTOC_ZIP && \ - unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ - unzip -o $PROTOC_ZIP -d /usr/local include/* && \ - rm -f $PROTOC_ZIP; \ - else \ - echo "Unsupported architecture: $ARCH" && exit 1; \ - fi -ENV PATH="$HOME/go/bin:${PATH}" +# Install base dependencies and build tools +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + ca-certificates curl gnupg lsb-release tzdata locales \ + software-properties-common apt-transport-https \ + git make unzip xz-utils \ + build-essential autoconf bison re2c pkg-config \ + libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ + libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ + libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + apache2 \ + apache2-bin \ + apache2-dev \ + nginx \ + mariadb-server \ + && rm -rf /var/lib/apt/lists/* + +# Timezone to UTC +RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ + echo "${TZ}" > /etc/timezone && \ + dpkg-reconfigure -f noninteractive tzdata # Fetch and build PHP from source with NTS FROM base AS php-build @@ -87,10 +45,12 @@ WORKDIR /usr/src RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git WORKDIR /usr/src/php-src +RUN apt-get update && apt-get install -y wget && rm -rf /usr/local/go && wget https://go.dev/dl/go1.24.1.linux-arm64.tar.gz && tar -C /usr/local -xzf go1.24.1.linux-arm64.tar.gz && export PATH=/usr/local/go/bin:$PATH && wget https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-aarch_64.zip && unzip protoc-24.4-linux-aarch_64.zip -d /usr/local && chmod +x /usr/local/bin/protoc && export PATH=$PATH:/usr/local/bin && /usr/local/go/bin/go install google.golang.org/protobuf/cmd/protoc-gen-go@latest && /usr/local/go/bin/go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest && ln -sf /root/go/bin/protoc-gen-go /usr/local/bin/protoc-gen-go && ln -sf /root/go/bin/protoc-gen-go-grpc /usr/local/bin/protoc-gen-go-grpc && protoc-gen-go --version && protoc-gen-go-grpc --version && protoc --version && go version + RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac RUN ./buildconf --force -# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) +# Patch openssl.c for OpenSSL compatibility RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ indent = $0; \ @@ -105,6 +65,13 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ fi || true +RUN mkdir -p /usr/local/etc/php/conf.d + +# Apache: switch to prefork for mod_php scenario and enable rewrite +RUN a2dismod mpm_event || true && \ + a2dismod mpm_worker || true && \ + a2enmod mpm_prefork rewrite cgi cgid || true + # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ @@ -113,6 +80,7 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ + --enable-cgi \ --with-extra-version="" \ --with-curl \ --with-mysqli \ @@ -120,6 +88,8 @@ RUN ./configure \ --with-zlib \ --with-zip \ --with-apxs2=/usr/bin/apxs \ + --disable-zts \ +&& mkdir -p 
/usr/lib/apache2/modules \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true @@ -127,15 +97,17 @@ RUN ./configure \ # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local -# Copy libphp.so module (installed by apxs to Apache modules directory, not /usr/local) -# Note: /usr/lib64/ is the standard path for 64-bit libraries on both x86_64 and aarch64 -# The architecture is determined by the binary itself (ELF header), not the directory path -# apxs installs to /usr/lib64/httpd/modules/ on CentOS/RHEL, or /usr/lib/httpd/modules/ on some distros -# We need to copy it from the build stage since COPY --from=php-build /usr/local only copies /usr/local -RUN mkdir -p /usr/lib64/httpd/modules /usr/lib/httpd/modules -# Copy from lib64 (standard on CentOS/RHEL for both x86_64 and aarch64) -# If libphp.so exists in the build stage, it will be copied; if not, COPY will fail gracefully -COPY --from=php-build /usr/lib64/httpd/modules/ /usr/lib64/httpd/modules/ + +RUN mkdir -p /usr/lib/apache2/modules +# Copy libphp module from build stage (PHP 7 uses libphp7.so, PHP 8+ uses libphp.so) +# Use wildcard to copy whichever file exists +COPY --from=php-build /usr/lib/apache2/modules/libphp*.so /usr/lib/apache2/modules/ + +# Configure Apache to use prefork MPM (non-threaded) for NTS PHP +# This must be done in the final stage since the base image has default MPM settings +RUN a2dismod mpm_event || true && \ + a2dismod mpm_worker || true && \ + a2enmod mpm_prefork || true RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ @@ -152,24 +124,27 @@ RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ - ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ + ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi -RUN mkdir -p /etc/php-fpm.d && \ +RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ + mkdir -p /etc/php/${PHP_VER}/fpm && \ + mkdir -p /etc/php/${PHP_VER}/fpm/pool.d && \ + mkdir -p /run/php && \ mkdir -p /run/php-fpm && \ mkdir -p /var/run && \ - mkdir -p /var/log/php-fpm && \ - mkdir -p /etc/httpd || true && \ + mkdir -p /var/log && \ mkdir -p /usr/local/etc/php-fpm.d && \ mkdir -p /usr/local/etc/php/conf.d && \ - ln -sf /usr/local/etc/php/conf.d /etc/php.d || true && \ + ln -sf /usr/local/etc/php/conf.d /etc/php/${PHP_VER}/fpm/conf.d || true && \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm/error.log" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php${PHP_VER}-fpm.log" >> /usr/local/etc/php-fpm.conf && \ echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/etc/php/${PHP_VER}/fpm/pool.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ @@ -192,39 +167,79 @@ RUN mkdir -p /etc/php-fpm.d && \ echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ exit 1) && \ - ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ - ln -sf 
/usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true + ln -sf /usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true # Configure MySQL socket path for mysqli (so "localhost" connections work) -RUN mkdir -p /usr/local/etc/php/conf.d && \ - echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini - -RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ - echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" - -# Configure Apache to load libphp.so module (if it exists) -RUN if [ -f /usr/lib64/httpd/modules/libphp.so ]; then \ - echo "# Load PHP module for Apache" > /etc/httpd/conf.modules.d/10-php.conf && \ - echo "LoadModule php_module modules/libphp.so" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "# Configure PHP file handling" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo " SetHandler application/x-httpd-php" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "# Directory index" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "DirectoryIndex index.php index.html" >> /etc/httpd/conf.modules.d/10-php.conf && \ - echo "Created Apache PHP module configuration at /etc/httpd/conf.modules.d/10-php.conf"; \ +# Note: /usr/local/etc/php/conf.d was created before PHP build +RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + +# Configure Apache to load libphp module (if it exists) +# PHP 7 uses libphp7.so, other versions use libphp.so +# Check for both possible filenames to handle any case +RUN if [ -f "/usr/lib/apache2/modules/libphp7.so" ]; then \ + LIBPHP_NAME="libphp7.so"; \ + MODULE_NAME="php7_module"; \ + elif [ -f "/usr/lib/apache2/modules/libphp.so" ]; then \ + LIBPHP_NAME="libphp.so"; \ + MODULE_NAME="php_module"; \ else \ - echo "Warning: libphp.so not found, skipping Apache PHP module configuration"; \ + echo "Warning: No libphp module found in /usr/lib/apache2/modules/" && \ + ls -la /usr/lib/apache2/modules/ || true && \ + exit 0; \ + fi && \ + if [ -n "$LIBPHP_NAME" ]; then \ + echo "# Load PHP module for Apache" > /etc/apache2/conf-available/php.conf && \ + echo "LoadModule ${MODULE_NAME} /usr/lib/apache2/modules/${LIBPHP_NAME}" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ + echo "# Configure PHP file handling" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ + echo " PHPIniDir /usr/local/etc/php" >> /etc/apache2/conf-available/php.conf && \ + echo " " >> /etc/apache2/conf-available/php.conf && \ + echo " SetHandler application/x-httpd-php" >> /etc/apache2/conf-available/php.conf && \ + echo " " >> /etc/apache2/conf-available/php.conf && \ + echo " DirectoryIndex index.php index.html" >> /etc/apache2/conf-available/php.conf && \ + echo "" >> /etc/apache2/conf-available/php.conf && \ + a2enconf php >/dev/null 2>&1 || true && \ + echo "Created Apache PHP module configuration with ${LIBPHP_NAME} (${MODULE_NAME})"; \ fi -# Python deps used by test harness -RUN python3 -m pip install --no-cache-dir --upgrade pip && \ - python3 -m pip install --no-cache-dir flask requests psutil +# ---- Python 
toolchain used by tests ---- +ENV PIP_DISABLE_PIP_VERSION_CHECK=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + VIRTUAL_ENV=/opt/ci-venv \ + PATH="/opt/ci-venv/bin:${PATH}" + +RUN apt-get update && apt-get install -y --no-install-recommends \ + python3 python3-venv python3-pip python3-dev \ + && python3 -m venv "$VIRTUAL_ENV" \ + && "$VIRTUAL_ENV/bin/pip" install --no-cache-dir \ + flask pandas psutil requests \ + && apt-get clean && rm -rf /var/lib/apt/lists/* + +# Helper: start MariaDB +RUN mkdir -p /usr/local/bin /var/lib/mysql /run/mysqld && \ + printf '%s\n' '#!/usr/bin/env bash' \ + 'set -euo pipefail' \ + 'mkdir -p /var/lib/mysql /run/mysqld' \ + 'chown -R mysql:mysql /var/lib/mysql /run/mysqld' \ + 'if [ ! -d /var/lib/mysql/mysql ]; then' \ + ' mysqld --initialize-insecure --user=mysql --datadir=/var/lib/mysql' \ + 'fi' \ + 'mysqld --user=mysql --datadir=/var/lib/mysql &' \ + 'pid=$!' \ + 'for i in {1..30}; do mysqladmin ping --silent && break; sleep 1; done' \ + 'mysql -u root -e "CREATE DATABASE IF NOT EXISTS db;" || true' \ + 'mysql -u root -e "ALTER USER '\''root'\''@'\''localhost'\'' IDENTIFIED BY '\''pwd'\''; FLUSH PRIVILEGES;" || true' \ + 'wait $pid' \ + > /usr/local/bin/start-mariadb && \ + chmod +x /usr/local/bin/start-mariadb + +# Create PHP-CGI symlink for CGI tests (using source-built PHP) +RUN mkdir -p /usr/lib/cgi-bin && \ + ln -sf /usr/local/bin/php-cgi /usr/lib/cgi-bin/php-cgi || \ + (echo "Note: php-cgi may not be available in source build" && true) -# Quality-of-life WORKDIR /work CMD ["bash"] - diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 4f8295623..d144f21c5 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -110,14 +110,13 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local -COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp -RUN if [ -f /usr/local/bin/frankenphp ]; then \ +RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin/frankenphp,target=/tmp/frankenphp \ + if awk "BEGIN {exit !(${PHP_VERSION} >= 8.2)}"; then \ + cp /tmp/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - else \ - echo "WARNING: frankenphp binary not found in source image; skipping FrankenPHP setup in this step."; \ fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ From 41ee3b5432f17492c069ac3661b11a0bfcd4eebf Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 03:28:53 +0200 Subject: [PATCH 087/170] . 
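The COPY-to-bind-mount change above, mirrored for Ubuntu in the patch below, relies on a BuildKit feature already enabled by the '# syntax=docker/dockerfile:1.7' header: 'RUN --mount=type=bind,from=<stage>' exposes a file from another build stage only while that single RUN step executes. Whether the binary actually gets installed can then be decided in shell rather than hard-wired by a COPY. A minimal sketch with the same stage name and paths as the diffs (the surrounding stages are stubbed, and the file-existence guard is the variant the series settles on later):

    # syntax=docker/dockerfile:1.7
    ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php8.3
    FROM ${FRANKENPHP_IMAGE} AS frankenphp-source

    FROM ubuntu:24.04 AS final
    # /tmp/frankenphp exists only for the duration of this RUN step;
    # putting it into the image is an explicit, conditional action.
    RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin/frankenphp,target=/tmp/frankenphp \
        if [ -f /tmp/frankenphp ]; then \
            cp /tmp/frankenphp /usr/local/bin/frankenphp && chmod +x /usr/local/bin/frankenphp; \
        fi

Unlike a COPY, the mount itself contributes nothing to the final image; only the explicit cp does.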
--- .devcontainer/centos_php_test_nts/Dockerfile | 243 ++++++++---------- .../workflows/Dockerfile.ubuntu-php-test-zts | 7 +- 2 files changed, 117 insertions(+), 133 deletions(-) diff --git a/.devcontainer/centos_php_test_nts/Dockerfile b/.devcontainer/centos_php_test_nts/Dockerfile index 90d9d51ef..ae217015c 100644 --- a/.devcontainer/centos_php_test_nts/Dockerfile +++ b/.devcontainer/centos_php_test_nts/Dockerfile @@ -1,42 +1,84 @@ # syntax=docker/dockerfile:1.7 -# Ubuntu test image with PHP built from source in NTS mode +# CentOS Stream 9 test image with PHP built from source in NTS mode # Used for testing the extension with standard PHP (non-thread-safe) -ARG DEBIAN_FRONTEND=noninteractive +ARG BASE_IMAGE=quay.io/centos/centos:stream9 ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -FROM ubuntu:24.04 AS base -SHELL ["/bin/bash", "-eo", "pipefail", "-c"] +FROM ${BASE_IMAGE} AS base +SHELL ["/bin/bash", "-euo", "pipefail", "-c"] -ENV DEBIAN_FRONTEND=noninteractive \ - TZ=Etc/UTC \ +ENV TZ=Etc/UTC \ LC_ALL=C.UTF-8 \ LANG=C.UTF-8 \ - LANGUAGE=C.UTF-8 \ PHP_VERSION=${PHP_VERSION} -# Install base dependencies and build tools -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - ca-certificates curl gnupg lsb-release tzdata locales \ - software-properties-common apt-transport-https \ - git make unzip xz-utils \ - build-essential autoconf bison re2c pkg-config \ - libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ - libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ - libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ - apache2 \ - apache2-bin \ - apache2-dev \ - nginx \ - mariadb-server \ - && rm -rf /var/lib/apt/lists/* - -# Timezone to UTC -RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ - echo "${TZ}" > /etc/timezone && \ - dpkg-reconfigure -f noninteractive tzdata +RUN yum install -y yum-utils && \ + dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true + +# Install minimal tools needed for re2c build (replace curl-minimal with full curl) +RUN yum install -y xz tar gcc gcc-c++ make + +ENV RE2C_VERSION=3.1 +RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ + && mkdir -p /tmp/re2c-src \ + && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ + && cd /tmp/re2c-src \ + && ./configure \ + && make -j"$(nproc)" \ + && make install \ + && rm -rf /tmp/re2c-src /tmp/re2c.tar.xz + +# Install remaining build dependencies and tools +RUN yum install -y autoconf bison pkgconfig \ + libxml2-devel sqlite-devel libcurl-devel openssl-devel \ + libzip-devel oniguruma-devel libjpeg-turbo-devel libpng-devel libwebp-devel \ + libicu-devel readline-devel libxslt-devel \ + git wget \ + python3 python3-devel python3-pip \ + nginx httpd httpd-devel procps-ng mysql-server \ + cpio unzip nano lsof jq rpmdevtools sudo \ + && yum clean all + +# Install mariadb-devel separately (may need different repo or skip if not critical) +RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" + +# Install Go toolchain (architecture-aware) +RUN ARCH=$(uname -m) && \ + if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ + curl -O https://dl.google.com/go/go1.23.3.linux-amd64.tar.gz && \ + tar -C /usr/local -xzf go1.23.3.linux-amd64.tar.gz && \ + rm -f go1.23.3.linux-amd64.tar.gz; \ + elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = 
"arm64" ]; then \ + curl -O https://dl.google.com/go/go1.23.3.linux-arm64.tar.gz && \ + tar -C /usr/local -xzf go1.23.3.linux-arm64.tar.gz && \ + rm -f go1.23.3.linux-arm64.tar.gz; \ + else \ + echo "Unsupported architecture: $ARCH" && exit 1; \ + fi +ENV PATH="/usr/local/go/bin:${PATH}" + +# Install protoc and Go protobuf plugins +RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@latest \ + && go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest +RUN ARCH=$(uname -m) && \ + if [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then \ + PROTOC_ZIP=protoc-28.3-linux-x86_64.zip && \ + curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v28.3/$PROTOC_ZIP && \ + unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ + unzip -o $PROTOC_ZIP -d /usr/local include/* && \ + rm -f $PROTOC_ZIP; \ + elif [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \ + PROTOC_ZIP=protoc-28.3-linux-aarch_64.zip && \ + curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v28.3/$PROTOC_ZIP && \ + unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ + unzip -o $PROTOC_ZIP -d /usr/local include/* && \ + rm -f $PROTOC_ZIP; \ + else \ + echo "Unsupported architecture: $ARCH" && exit 1; \ + fi +ENV PATH="$HOME/go/bin:${PATH}" # Fetch and build PHP from source with NTS FROM base AS php-build @@ -45,12 +87,10 @@ WORKDIR /usr/src RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git WORKDIR /usr/src/php-src -RUN apt-get update && apt-get install -y wget && rm -rf /usr/local/go && wget https://go.dev/dl/go1.24.1.linux-arm64.tar.gz && tar -C /usr/local -xzf go1.24.1.linux-arm64.tar.gz && export PATH=/usr/local/go/bin:$PATH && wget https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protoc-24.4-linux-aarch_64.zip && unzip protoc-24.4-linux-aarch_64.zip -d /usr/local && chmod +x /usr/local/bin/protoc && export PATH=$PATH:/usr/local/bin && /usr/local/go/bin/go install google.golang.org/protobuf/cmd/protoc-gen-go@latest && /usr/local/go/bin/go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest && ln -sf /root/go/bin/protoc-gen-go /usr/local/bin/protoc-gen-go && ln -sf /root/go/bin/protoc-gen-go-grpc /usr/local/bin/protoc-gen-go-grpc && protoc-gen-go --version && protoc-gen-go-grpc --version && protoc --version && go version - RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac RUN ./buildconf --force -# Patch openssl.c for OpenSSL compatibility +# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ indent = $0; \ @@ -65,13 +105,6 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ fi || true -RUN mkdir -p /usr/local/etc/php/conf.d - -# Apache: switch to prefork for mod_php scenario and enable rewrite -RUN a2dismod mpm_event || true && \ - a2dismod mpm_worker || true && \ - a2enmod mpm_prefork rewrite cgi cgid || true - # Build PHP with NTS (no ZTS flags) RUN ./configure \ --prefix=/usr/local \ @@ -80,7 +113,6 @@ RUN ./configure \ --enable-fpm \ --enable-mbstring \ --enable-pcntl \ - --enable-cgi \ --with-extra-version="" \ --with-curl \ --with-mysqli \ @@ -88,8 +120,6 @@ RUN ./configure \ --with-zlib \ --with-zip \ --with-apxs2=/usr/bin/apxs \ - --disable-zts \ -&& mkdir -p 
/usr/lib/apache2/modules \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true @@ -97,17 +127,15 @@ RUN ./configure \ # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local - -RUN mkdir -p /usr/lib/apache2/modules -# Copy libphp module from build stage (PHP 7 uses libphp7.so, PHP 8+ uses libphp.so) -# Use wildcard to copy whichever file exists -COPY --from=php-build /usr/lib/apache2/modules/libphp*.so /usr/lib/apache2/modules/ - -# Configure Apache to use prefork MPM (non-threaded) for NTS PHP -# This must be done in the final stage since the base image has default MPM settings -RUN a2dismod mpm_event || true && \ - a2dismod mpm_worker || true && \ - a2enmod mpm_prefork || true +# Copy libphp.so module (installed by apxs to Apache modules directory, not /usr/local) +# Note: /usr/lib64/ is the standard path for 64-bit libraries on both x86_64 and aarch64 +# The architecture is determined by the binary itself (ELF header), not the directory path +# apxs installs to /usr/lib64/httpd/modules/ on CentOS/RHEL, or /usr/lib/httpd/modules/ on some distros +# We need to copy it from the build stage since COPY --from=php-build /usr/local only copies /usr/local +RUN mkdir -p /usr/lib64/httpd/modules /usr/lib/httpd/modules +# Copy from lib64 (standard on CentOS/RHEL for both x86_64 and aarch64) +# If libphp.so exists in the build stage, it will be copied; if not, COPY will fail gracefully +COPY --from=php-build /usr/lib64/httpd/modules/ /usr/lib64/httpd/modules/ RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ if [ -z "$EXTENSION_DIR" ]; then \ @@ -124,27 +152,24 @@ RUN php -v | grep -v "ZTS" >/dev/null || (echo "ERROR: ZTS is enabled but should ENV PATH="/usr/local/bin:${PATH}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ - ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ - ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true -RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ - mkdir -p /etc/php/${PHP_VER}/fpm && \ - mkdir -p /etc/php/${PHP_VER}/fpm/pool.d && \ - mkdir -p /run/php && \ +RUN mkdir -p /etc/php-fpm.d && \ mkdir -p /run/php-fpm && \ mkdir -p /var/run && \ - mkdir -p /var/log && \ + mkdir -p /var/log/php-fpm && \ + mkdir -p /etc/httpd || true && \ mkdir -p /usr/local/etc/php-fpm.d && \ mkdir -p /usr/local/etc/php/conf.d && \ - ln -sf /usr/local/etc/php/conf.d /etc/php/${PHP_VER}/fpm/conf.d || true && \ + ln -sf /usr/local/etc/php/conf.d /etc/php.d || true && \ echo "[global]" > /usr/local/etc/php-fpm.conf && \ echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php${PHP_VER}-fpm.log" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm/error.log" >> /usr/local/etc/php-fpm.conf && \ echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/etc/php/${PHP_VER}/fpm/pool.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ @@ -167,79 +192,39 @@ RUN PHP_VER=$(php -r 'echo PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;') && \ echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ exit 1) && \ - ln -sf 
/usr/local/etc/php-fpm.conf /etc/php/${PHP_VER}/fpm/php-fpm.conf && \ - ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php/${PHP_VER}/fpm/pool.d/www.conf || true + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true # Configure MySQL socket path for mysqli (so "localhost" connections work) -# Note: /usr/local/etc/php/conf.d was created before PHP build -RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini - -# Configure Apache to load libphp module (if it exists) -# PHP 7 uses libphp7.so, other versions use libphp.so -# Check for both possible filenames to handle any case -RUN if [ -f "/usr/lib/apache2/modules/libphp7.so" ]; then \ - LIBPHP_NAME="libphp7.so"; \ - MODULE_NAME="php7_module"; \ - elif [ -f "/usr/lib/apache2/modules/libphp.so" ]; then \ - LIBPHP_NAME="libphp.so"; \ - MODULE_NAME="php_module"; \ +RUN mkdir -p /usr/local/etc/php/conf.d && \ + echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + +RUN mkdir -p /etc/php/${PHP_VERSION}/apache2/conf.d && \ + echo "Created Apache mod_php directory: /etc/php/${PHP_VERSION}/apache2/conf.d" + +# Configure Apache to load libphp.so module (if it exists) +RUN if [ -f /usr/lib64/httpd/modules/libphp.so ]; then \ + echo "# Load PHP module for Apache" > /etc/httpd/conf.modules.d/10-php.conf && \ + echo "LoadModule php_module modules/libphp.so" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Configure PHP file handling" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo " SetHandler application/x-httpd-php" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "# Directory index" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "DirectoryIndex index.php index.html" >> /etc/httpd/conf.modules.d/10-php.conf && \ + echo "Created Apache PHP module configuration at /etc/httpd/conf.modules.d/10-php.conf"; \ else \ - echo "Warning: No libphp module found in /usr/lib/apache2/modules/" && \ - ls -la /usr/lib/apache2/modules/ || true && \ - exit 0; \ - fi && \ - if [ -n "$LIBPHP_NAME" ]; then \ - echo "# Load PHP module for Apache" > /etc/apache2/conf-available/php.conf && \ - echo "LoadModule ${MODULE_NAME} /usr/lib/apache2/modules/${LIBPHP_NAME}" >> /etc/apache2/conf-available/php.conf && \ - echo "" >> /etc/apache2/conf-available/php.conf && \ - echo "# Configure PHP file handling" >> /etc/apache2/conf-available/php.conf && \ - echo "" >> /etc/apache2/conf-available/php.conf && \ - echo " PHPIniDir /usr/local/etc/php" >> /etc/apache2/conf-available/php.conf && \ - echo " " >> /etc/apache2/conf-available/php.conf && \ - echo " SetHandler application/x-httpd-php" >> /etc/apache2/conf-available/php.conf && \ - echo " " >> /etc/apache2/conf-available/php.conf && \ - echo " DirectoryIndex index.php index.html" >> /etc/apache2/conf-available/php.conf && \ - echo "" >> /etc/apache2/conf-available/php.conf && \ - a2enconf php >/dev/null 2>&1 || true && \ - echo "Created Apache PHP module configuration with ${LIBPHP_NAME} (${MODULE_NAME})"; \ + echo "Warning: libphp.so not found, skipping Apache PHP module configuration"; \ fi -# ---- Python toolchain used by tests ---- -ENV PIP_DISABLE_PIP_VERSION_CHECK=1 \ - PYTHONDONTWRITEBYTECODE=1 \ - 
VIRTUAL_ENV=/opt/ci-venv \ - PATH="/opt/ci-venv/bin:${PATH}" - -RUN apt-get update && apt-get install -y --no-install-recommends \ - python3 python3-venv python3-pip python3-dev \ - && python3 -m venv "$VIRTUAL_ENV" \ - && "$VIRTUAL_ENV/bin/pip" install --no-cache-dir \ - flask pandas psutil requests \ - && apt-get clean && rm -rf /var/lib/apt/lists/* - -# Helper: start MariaDB -RUN mkdir -p /usr/local/bin /var/lib/mysql /run/mysqld && \ - printf '%s\n' '#!/usr/bin/env bash' \ - 'set -euo pipefail' \ - 'mkdir -p /var/lib/mysql /run/mysqld' \ - 'chown -R mysql:mysql /var/lib/mysql /run/mysqld' \ - 'if [ ! -d /var/lib/mysql/mysql ]; then' \ - ' mysqld --initialize-insecure --user=mysql --datadir=/var/lib/mysql' \ - 'fi' \ - 'mysqld --user=mysql --datadir=/var/lib/mysql &' \ - 'pid=$!' \ - 'for i in {1..30}; do mysqladmin ping --silent && break; sleep 1; done' \ - 'mysql -u root -e "CREATE DATABASE IF NOT EXISTS db;" || true' \ - 'mysql -u root -e "ALTER USER '\''root'\''@'\''localhost'\'' IDENTIFIED BY '\''pwd'\''; FLUSH PRIVILEGES;" || true' \ - 'wait $pid' \ - > /usr/local/bin/start-mariadb && \ - chmod +x /usr/local/bin/start-mariadb - -# Create PHP-CGI symlink for CGI tests (using source-built PHP) -RUN mkdir -p /usr/lib/cgi-bin && \ - ln -sf /usr/local/bin/php-cgi /usr/lib/cgi-bin/php-cgi || \ - (echo "Note: php-cgi may not be available in source build" && true) +# Python deps used by test harness +RUN python3 -m pip install --no-cache-dir --upgrade pip && \ + python3 -m pip install --no-cache-dir flask requests psutil +# Quality-of-life WORKDIR /work CMD ["bash"] + diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index c2b3cb250..4d18dd600 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -106,14 +106,13 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local -COPY --from=frankenphp-source /usr/local/bin/frankenphp /usr/local/bin/frankenphp -RUN if [ -f /usr/local/bin/frankenphp ]; then \ +RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin/frankenphp,target=/tmp/frankenphp \ + if awk "BEGIN {exit !(${PHP_VERSION} >= 8.2)}"; then \ + cp /tmp/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules; \ - else \ - echo "WARNING: frankenphp binary not found in source image; skipping FrankenPHP setup in this step."; \ fi From dc77b17ef64810fbb29bad2e0e96cc37d0274447 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 03:45:37 +0200 Subject: [PATCH 088/170] + --- .../workflows/Dockerfile.centos-php-test-zts | 5 +++-- .../workflows/Dockerfile.ubuntu-php-test-zts | 4 ++-- .../build-centos-php-test-images-zts.yml | 22 +++++++++++++++++++ .../build-ubuntu-php-test-images-zts.yml | 22 +++++++++++++++++++ 4 files changed, 49 insertions(+), 4 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index d144f21c5..7cdd55022 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -5,11 +5,12 @@ ARG BASE_IMAGE=quay.io/centos/centos:stream9 ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} +ARG FRANKENPHP_IMAGE=scratch FROM ${BASE_IMAGE} AS base SHELL ["/bin/bash", "-euo", "pipefail", "-c"] 
+ARG PHP_VERSION ENV TZ=Etc/UTC \ LC_ALL=C.UTF-8 \ LANG=C.UTF-8 \ @@ -111,7 +112,7 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin/frankenphp,target=/tmp/frankenphp \ - if awk "BEGIN {exit !(${PHP_VERSION} >= 8.2)}"; then \ + if [ -f /tmp/frankenphp ]; then \ cp /tmp/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 4d18dd600..2888e6e60 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -5,7 +5,7 @@ ARG DEBIAN_FRONTEND=noninteractive ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -ARG FRANKENPHP_IMAGE=dunglas/frankenphp:php${PHP_VERSION} +ARG FRANKENPHP_IMAGE=scratch FROM ubuntu:24.04 AS base SHELL ["/bin/bash", "-eo", "pipefail", "-c"] @@ -107,7 +107,7 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin/frankenphp,target=/tmp/frankenphp \ - if awk "BEGIN {exit !(${PHP_VERSION} >= 8.2)}"; then \ + if [ -f /tmp/frankenphp ]; then \ cp /tmp/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index a4bef6f12..4ed234dc0 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -28,6 +28,16 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Set FrankenPHP image + id: frankenphp_image + run: | + PHP_VERSION="${{ matrix.php_version }}" + if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then + echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + else + echo "image=scratch" >> $GITHUB_OUTPUT + fi + - name: Build & push (amd64) uses: docker/build-push-action@v6 with: @@ -37,6 +47,7 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} + FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max @@ -56,6 +67,16 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Set FrankenPHP image + id: frankenphp_image + run: | + PHP_VERSION="${{ matrix.php_version }}" + if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then + echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + else + echo "image=scratch" >> $GITHUB_OUTPUT + fi + - name: Build & push (arm64) uses: docker/build-push-action@v6 with: @@ -65,6 +86,7 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} + FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-to: 
type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index c8eda9587..8dda73249 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -27,6 +27,16 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Set FrankenPHP image + id: frankenphp_image + run: | + PHP_VERSION="${{ matrix.php_version }}" + if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then + echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + else + echo "image=scratch" >> $GITHUB_OUTPUT + fi + - uses: docker/build-push-action@v6 with: context: . @@ -35,6 +45,7 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} + FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max @@ -53,6 +64,16 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Set FrankenPHP image + id: frankenphp_image + run: | + PHP_VERSION="${{ matrix.php_version }}" + if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then + echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + else + echo "image=scratch" >> $GITHUB_OUTPUT + fi + - uses: docker/build-push-action@v6 with: context: . 
@@ -61,6 +82,7 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} + FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max From cab4b9cbdc82af8c7dd38f0beb400240132ddefb Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 03:49:45 +0200 Subject: [PATCH 089/170] +++ --- .github/workflows/Dockerfile.centos-php-test-zts | 6 +++--- .github/workflows/Dockerfile.ubuntu-php-test-zts | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 7cdd55022..f6dd00520 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -111,9 +111,9 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local -RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin/frankenphp,target=/tmp/frankenphp \ - if [ -f /tmp/frankenphp ]; then \ - cp /tmp/frankenphp /usr/local/bin/frankenphp && \ +RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin,target=/tmp/frankenphp-bin \ + if [ -f /tmp/frankenphp-bin/frankenphp ]; then \ + cp /tmp/frankenphp-bin/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 2888e6e60..54a96a1f0 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -106,9 +106,9 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local -RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin/frankenphp,target=/tmp/frankenphp \ - if [ -f /tmp/frankenphp ]; then \ - cp /tmp/frankenphp /usr/local/bin/frankenphp && \ +RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin,target=/tmp/frankenphp-bin \ + if [ -f /tmp/frankenphp-bin/frankenphp ]; then \ + cp /tmp/frankenphp-bin/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ From 70b6876e2332d46d9df03a69bd8a3256694655cc Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 03:56:39 +0200 Subject: [PATCH 090/170] . 
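The workflow changes above gate FrankenPHP on the PHP series with awk, since plain test(1) only compares integers. A minimal standalone sketch of that gate; PHP_VERSION is set inline here for illustration, whereas the jobs take it from matrix.php_version:

  #!/usr/bin/env bash
  set -euo pipefail
  PHP_VERSION="8.1"   # illustrative value
  # awk exits 0 when the expression is true, so its exit status drives the if.
  # The comparison is numeric, so a hypothetical "8.10" would read as 8.1.
  if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then
    echo "PHP $PHP_VERSION: FrankenPHP image available"
  else
    echo "PHP $PHP_VERSION: skipping FrankenPHP (needs >= 8.2)"
  fi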
--- .github/workflows/Dockerfile.centos-php-test-zts | 6 +++--- .github/workflows/Dockerfile.ubuntu-php-test-zts | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index f6dd00520..da8137234 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -111,9 +111,9 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local -RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin,target=/tmp/frankenphp-bin \ - if [ -f /tmp/frankenphp-bin/frankenphp ]; then \ - cp /tmp/frankenphp-bin/frankenphp /usr/local/bin/frankenphp && \ +RUN --mount=type=bind,from=frankenphp-source,source=/usr/local,target=/tmp/frankenphp-source \ + if [ -f /tmp/frankenphp-source/bin/frankenphp ]; then \ + cp /tmp/frankenphp-source/bin/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 54a96a1f0..22bc78023 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -106,9 +106,9 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local /usr/local -RUN --mount=type=bind,from=frankenphp-source,source=/usr/local/bin,target=/tmp/frankenphp-bin \ - if [ -f /tmp/frankenphp-bin/frankenphp ]; then \ - cp /tmp/frankenphp-bin/frankenphp /usr/local/bin/frankenphp && \ +RUN --mount=type=bind,from=frankenphp-source,source=/usr/local,target=/tmp/frankenphp-source \ + if [ -f /tmp/frankenphp-source/bin/frankenphp ]; then \ + cp /tmp/frankenphp-source/bin/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ From 3ae0cfae36cb113013400d34800253990a22da23 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 04:00:29 +0200 Subject: [PATCH 091/170] . 
--- .github/workflows/Dockerfile.centos-php-test-zts | 2 +- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 +- .github/workflows/build-centos-php-test-images-zts.yml | 4 ++-- .github/workflows/build-ubuntu-php-test-images-zts.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index da8137234..8aab93ad2 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -5,7 +5,7 @@ ARG BASE_IMAGE=quay.io/centos/centos:stream9 ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -ARG FRANKENPHP_IMAGE=scratch +ARG FRANKENPHP_IMAGE=quay.io/centos/centos:stream9 FROM ${BASE_IMAGE} AS base SHELL ["/bin/bash", "-euo", "pipefail", "-c"] diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 22bc78023..e9f6ed3c0 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -5,7 +5,7 @@ ARG DEBIAN_FRONTEND=noninteractive ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -ARG FRANKENPHP_IMAGE=scratch +ARG FRANKENPHP_IMAGE=ubuntu:24.04 FROM ubuntu:24.04 AS base SHELL ["/bin/bash", "-eo", "pipefail", "-c"] diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 4ed234dc0..e4a583028 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -35,7 +35,7 @@ jobs: if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT else - echo "image=scratch" >> $GITHUB_OUTPUT + echo "image=quay.io/centos/centos:stream9" >> $GITHUB_OUTPUT fi - name: Build & push (amd64) @@ -74,7 +74,7 @@ jobs: if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT else - echo "image=scratch" >> $GITHUB_OUTPUT + echo "image=quay.io/centos/centos:stream9" >> $GITHUB_OUTPUT fi - name: Build & push (arm64) diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index 8dda73249..a931de3fe 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -34,7 +34,7 @@ jobs: if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT else - echo "image=scratch" >> $GITHUB_OUTPUT + echo "image=ubuntu:24.04" >> $GITHUB_OUTPUT fi - uses: docker/build-push-action@v6 @@ -71,7 +71,7 @@ jobs: if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT else - echo "image=scratch" >> $GITHUB_OUTPUT + echo "image=ubuntu:24.04" >> $GITHUB_OUTPUT fi - uses: docker/build-push-action@v6 From d51bc5126f4a8ccc50c4625dcde72808821adcfb Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 04:04:13 +0200 Subject: [PATCH 092/170] ++++ --- .github/workflows/Dockerfile.centos-php-test-zts | 6 +++--- .github/workflows/Dockerfile.ubuntu-php-test-zts | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 8aab93ad2..7c33a6764 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ 
b/.github/workflows/Dockerfile.centos-php-test-zts @@ -109,11 +109,11 @@ RUN case "$PHP_VERSION" in \ ;; \ esac -COPY --from=php-build /usr/local /usr/local +COPY --from=php-build /usr/local/bin /usr/local/bin RUN --mount=type=bind,from=frankenphp-source,source=/usr/local,target=/tmp/frankenphp-source \ - if [ -f /tmp/frankenphp-source/bin/frankenphp ]; then \ - cp /tmp/frankenphp-source/bin/frankenphp /usr/local/bin/frankenphp && \ + if [ -f /tmp/frankenphp-source/frankenphp ]; then \ + cp /tmp/frankenphp-source/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index e9f6ed3c0..fb1c1f654 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -104,11 +104,11 @@ RUN case "$PHP_VERSION" in \ ;; \ esac -COPY --from=php-build /usr/local /usr/local +COPY --from=php-build /usr/local/bin /usr/local/bin RUN --mount=type=bind,from=frankenphp-source,source=/usr/local,target=/tmp/frankenphp-source \ - if [ -f /tmp/frankenphp-source/bin/frankenphp ]; then \ - cp /tmp/frankenphp-source/bin/frankenphp /usr/local/bin/frankenphp && \ + if [ -f /tmp/frankenphp-source/frankenphp ]; then \ + cp /tmp/frankenphp-source/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ From 31012cdb3c1d480611e3a5724867af2b0f679a09 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 04:34:37 +0200 Subject: [PATCH 093/170] ++++ --- .../workflows/Dockerfile.centos-php-test-zts | 11 ++------ .../workflows/Dockerfile.ubuntu-php-test-zts | 11 ++------ .../build-centos-php-test-images-zts.yml | 28 ++++++++++++------- .../build-ubuntu-php-test-images-zts.yml | 26 +++++++++++------ 4 files changed, 42 insertions(+), 34 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 7c33a6764..b5eef8c52 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -5,7 +5,6 @@ ARG BASE_IMAGE=quay.io/centos/centos:stream9 ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -ARG FRANKENPHP_IMAGE=quay.io/centos/centos:stream9 FROM ${BASE_IMAGE} AS base SHELL ["/bin/bash", "-euo", "pipefail", "-c"] @@ -93,10 +92,6 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true -# Stage to extract FrankenPHP binary from official Docker image -# Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 -FROM ${FRANKENPHP_IMAGE} AS frankenphp-source - FROM base AS final ARG PHP_VERSION @@ -111,9 +106,9 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local/bin /usr/local/bin -RUN --mount=type=bind,from=frankenphp-source,source=/usr/local,target=/tmp/frankenphp-source \ - if [ -f /tmp/frankenphp-source/frankenphp ]; then \ - cp /tmp/frankenphp-source/frankenphp /usr/local/bin/frankenphp && \ +COPY frankenphp-binary/ /tmp/frankenphp-binary/ +RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ + cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts 
b/.github/workflows/Dockerfile.ubuntu-php-test-zts index fb1c1f654..fb7b7371a 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -5,7 +5,6 @@ ARG DEBIAN_FRONTEND=noninteractive ARG PHP_VERSION=8.3 ARG PHP_SRC_REF=PHP-${PHP_VERSION} -ARG FRANKENPHP_IMAGE=ubuntu:24.04 FROM ubuntu:24.04 AS base SHELL ["/bin/bash", "-eo", "pipefail", "-c"] @@ -87,10 +86,6 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true -# Stage to extract FrankenPHP binary from official Docker image -# Using simple PHP version tags: php8.2, php8.3, php8.4, php8.5 -FROM ${FRANKENPHP_IMAGE} AS frankenphp-source - # Final image with PHP and test infrastructure FROM base AS final @@ -106,9 +101,9 @@ RUN case "$PHP_VERSION" in \ COPY --from=php-build /usr/local/bin /usr/local/bin -RUN --mount=type=bind,from=frankenphp-source,source=/usr/local,target=/tmp/frankenphp-source \ - if [ -f /tmp/frankenphp-source/frankenphp ]; then \ - cp /tmp/frankenphp-source/frankenphp /usr/local/bin/frankenphp && \ +COPY frankenphp-binary/ /tmp/frankenphp-binary/ +RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ + cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index e4a583028..b42de1637 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -28,14 +28,19 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Set FrankenPHP image - id: frankenphp_image + - name: Extract FrankenPHP binary from Docker image run: | PHP_VERSION="${{ matrix.php_version }}" if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + docker pull dunglas/frankenphp:php${PHP_VERSION} || true + docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true + docker rm temp-frankenphp || true + chmod +x frankenphp-binary/frankenphp || true else - echo "image=quay.io/centos/centos:stream9" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + echo "WARNING: FrankenPHP binary not found for PHP version ${PHP_VERSION}. Skipping FrankenPHP-specific tests." 
fi - name: Build & push (amd64) @@ -47,7 +52,6 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} - FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max @@ -67,14 +71,19 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Set FrankenPHP image - id: frankenphp_image + - name: Extract FrankenPHP binary from Docker image run: | PHP_VERSION="${{ matrix.php_version }}" if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + docker pull dunglas/frankenphp:php${PHP_VERSION} || true + docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true + docker rm temp-frankenphp || true + chmod +x frankenphp-binary/frankenphp || true else - echo "image=quay.io/centos/centos:stream9" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + touch frankenphp-binary/.keep fi - name: Build & push (arm64) @@ -86,7 +95,6 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} - FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index a931de3fe..402d29de1 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -27,14 +27,19 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Set FrankenPHP image - id: frankenphp_image + - name: Extract FrankenPHP binary from Docker image run: | PHP_VERSION="${{ matrix.php_version }}" if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + docker pull dunglas/frankenphp:php${PHP_VERSION} || true + docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true + docker rm temp-frankenphp || true + chmod +x frankenphp-binary/frankenphp || true else - echo "image=ubuntu:24.04" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + echo "WARNING: FrankenPHP binary not found for PHP version ${PHP_VERSION}. Skipping FrankenPHP-specific tests." 
fi - uses: docker/build-push-action@v6 @@ -64,14 +69,19 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Set FrankenPHP image - id: frankenphp_image + - name: Extract FrankenPHP binary from Docker image run: | PHP_VERSION="${{ matrix.php_version }}" if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - echo "image=dunglas/frankenphp:php${PHP_VERSION}" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + docker pull dunglas/frankenphp:php${PHP_VERSION} || true + docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true + docker rm temp-frankenphp || true + chmod +x frankenphp-binary/frankenphp || true else - echo "image=ubuntu:24.04" >> $GITHUB_OUTPUT + mkdir -p frankenphp-binary + touch frankenphp-binary/.keep fi - uses: docker/build-push-action@v6 From f81a9567aec1e0f3447f191baaedfbb5a44e1370 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 04:58:12 +0200 Subject: [PATCH 094/170] . --- .../workflows/Dockerfile.centos-php-test-zts | 2 - .../workflows/Dockerfile.ubuntu-php-test-zts | 2 - .../build-centos-php-test-images-zts.yml | 40 ++++++++---------- .../build-ubuntu-php-test-images-zts.yml | 42 ++++++++----------- 4 files changed, 36 insertions(+), 50 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index b5eef8c52..6b977fb56 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -104,8 +104,6 @@ RUN case "$PHP_VERSION" in \ ;; \ esac -COPY --from=php-build /usr/local/bin /usr/local/bin - COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index fb7b7371a..b8e9bfd69 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -99,8 +99,6 @@ RUN case "$PHP_VERSION" in \ ;; \ esac -COPY --from=php-build /usr/local/bin /usr/local/bin - COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index b42de1637..125b6e956 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -29,19 +29,17 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Extract FrankenPHP binary from Docker image + if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - mkdir -p frankenphp-binary - docker pull dunglas/frankenphp:php${PHP_VERSION} || true - docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true - docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true - docker rm temp-frankenphp || true - chmod +x frankenphp-binary/frankenphp || true - else - mkdir -p frankenphp-binary - echo "WARNING: FrankenPHP binary not found for PHP version ${PHP_VERSION}. Skipping FrankenPHP-specific tests." 
- fi + mkdir -p frankenphp-binary + docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker rm temp-frankenphp + chmod +x frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp + file frankenphp-binary/frankenphp - name: Build & push (amd64) uses: docker/build-push-action@v6 @@ -72,19 +70,17 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Extract FrankenPHP binary from Docker image + if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - mkdir -p frankenphp-binary - docker pull dunglas/frankenphp:php${PHP_VERSION} || true - docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true - docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true - docker rm temp-frankenphp || true - chmod +x frankenphp-binary/frankenphp || true - else - mkdir -p frankenphp-binary - touch frankenphp-binary/.keep - fi + mkdir -p frankenphp-binary + docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker rm temp-frankenphp + chmod +x frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp + file frankenphp-binary/frankenphp - name: Build & push (arm64) uses: docker/build-push-action@v6 diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index 402d29de1..b29179437 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -28,19 +28,17 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Extract FrankenPHP binary from Docker image + if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - mkdir -p frankenphp-binary - docker pull dunglas/frankenphp:php${PHP_VERSION} || true - docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true - docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true - docker rm temp-frankenphp || true - chmod +x frankenphp-binary/frankenphp || true - else - mkdir -p frankenphp-binary - echo "WARNING: FrankenPHP binary not found for PHP version ${PHP_VERSION}. Skipping FrankenPHP-specific tests." 
- fi + mkdir -p frankenphp-binary + docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker rm temp-frankenphp + chmod +x frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp + file frankenphp-binary/frankenphp - uses: docker/build-push-action@v6 with: @@ -50,7 +48,6 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} - FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max @@ -70,19 +67,17 @@ jobs: username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Extract FrankenPHP binary from Docker image + if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - if awk "BEGIN {exit !($PHP_VERSION >= 8.2)}"; then - mkdir -p frankenphp-binary - docker pull dunglas/frankenphp:php${PHP_VERSION} || true - docker create --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} || true - docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp || true - docker rm temp-frankenphp || true - chmod +x frankenphp-binary/frankenphp || true - else - mkdir -p frankenphp-binary - touch frankenphp-binary/.keep - fi + mkdir -p frankenphp-binary + docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker rm temp-frankenphp + chmod +x frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp + file frankenphp-binary/frankenphp - uses: docker/build-push-action@v6 with: @@ -92,7 +87,6 @@ jobs: push: true build-args: | PHP_VERSION=${{ matrix.php_version }} - FRANKENPHP_IMAGE=${{ steps.frankenphp_image.outputs.image }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max From 0769b5eab01001dafe0b9bfd2dcdbf2f483f0e4d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 05:00:19 +0200 Subject: [PATCH 095/170] . 
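The step above replaces the guarded extraction (every command suffixed with || true) with a strict version, so a failed pull or copy now fails the job instead of quietly producing an image without FrankenPHP. The same extraction can be reproduced outside the workflow; the tag and platform below are illustrative:

  #!/usr/bin/env bash
  set -euo pipefail
  PHP_VERSION="8.3"                              # illustrative; the jobs use matrix.php_version
  IMAGE="dunglas/frankenphp:php${PHP_VERSION}"
  mkdir -p frankenphp-binary
  docker pull --platform linux/amd64 "$IMAGE"
  # Create (but never start) a container so its filesystem can be copied from.
  cid=$(docker create --platform linux/amd64 "$IMAGE")
  docker cp "$cid":/usr/local/bin/frankenphp frankenphp-binary/frankenphp
  docker rm "$cid"
  chmod +x frankenphp-binary/frankenphp
  file frankenphp-binary/frankenphp              # expect a 64-bit x86-64 ELF executable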
--- .github/workflows/build-centos-php-test-images-zts.yml | 8 ++++++-- .github/workflows/build-ubuntu-php-test-images-zts.yml | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 125b6e956..180782859 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -28,11 +28,13 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare FrankenPHP binary directory + run: mkdir -p frankenphp-binary + - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - mkdir -p frankenphp-binary docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp @@ -69,11 +71,13 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare FrankenPHP binary directory + run: mkdir -p frankenphp-binary + - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - mkdir -p frankenphp-binary docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index b29179437..7e014349c 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -27,11 +27,13 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare FrankenPHP binary directory + run: mkdir -p frankenphp-binary + - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - mkdir -p frankenphp-binary docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp @@ -66,11 +68,13 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Prepare FrankenPHP binary directory + run: mkdir -p frankenphp-binary + - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - mkdir -p frankenphp-binary docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp From a2e1bcebee44299504dbbac6a234cc0c611ba710 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 05:05:41 +0200 Subject: [PATCH 096/170] Remove PHP_VERSION argument and related conditional logic from Dockerfiles, simplifying the final image build process. 
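With the PHP_VERSION check gone from the Dockerfiles, the only remaining gate is whether the binary actually landed in the frankenphp-binary/ build-context directory, which the preceding workflow change now creates unconditionally so the COPY never fails. A rough sketch of the in-image branch, using coreutils install(1) in place of the cp plus chmod pair in the real Dockerfiles:

  # Runs inside the image after: COPY frankenphp-binary/ /tmp/frankenphp-binary/
  if [ -f /tmp/frankenphp-binary/frankenphp ]; then
    install -m 0755 /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp
    mkdir -p /etc/frankenphp/caddy.d /etc/frankenphp/php.d /usr/lib/frankenphp/modules
  else
    echo "frankenphp binary not provided for this PHP version; skipping FrankenPHP setup"
  fi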
--- .github/workflows/Dockerfile.centos-php-test-zts | 10 +--------- .github/workflows/Dockerfile.ubuntu-php-test-zts | 10 +--------- 2 files changed, 2 insertions(+), 18 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 6b977fb56..5de9d8162 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -94,15 +94,7 @@ RUN ./configure \ FROM base AS final -ARG PHP_VERSION -RUN case "$PHP_VERSION" in \ - 8.2|8.3|8.4|8.5) \ - echo "Using FrankenPHP Docker image for PHP_VERSION=${PHP_VERSION}"; \ - ;; \ - *) \ - echo "WARNING: FrankenPHP Docker images are officially only for PHP >= 8.2 (got PHP_VERSION=${PHP_VERSION}). Build will continue."; \ - ;; \ - esac +COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index b8e9bfd69..105eaaa5b 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -89,15 +89,7 @@ RUN ./configure \ # Final image with PHP and test infrastructure FROM base AS final -ARG PHP_VERSION -RUN case "$PHP_VERSION" in \ - 8.2|8.3|8.4|8.5) \ - echo "Using FrankenPHP Docker image for PHP_VERSION=${PHP_VERSION}"; \ - ;; \ - *) \ - echo "WARNING: FrankenPHP Docker images are officially only for PHP >= 8.2 (got PHP_VERSION=${PHP_VERSION}). Build will continue."; \ - ;; \ - esac +COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ From d94d16394d56d64088cc2afa9c5df0ecfd3fca90 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 05:22:24 +0200 Subject: [PATCH 097/170] Add --enable-embed option to PHP configuration in CentOS and Ubuntu Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-zts | 1 + .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 5de9d8162..8f1c9c1f8 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -82,6 +82,7 @@ RUN ./configure \ --enable-mbstring \ --enable-pcntl \ --enable-cgi \ + --enable-embed \ --with-extra-version="" \ --with-curl \ --with-mysqli \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 105eaaa5b..68290252e 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -76,6 +76,7 @@ RUN ./configure \ --enable-mbstring \ --enable-pcntl \ --enable-cgi \ + --enable-embed \ --with-extra-version="" \ --with-curl \ --with-mysqli \ From f00b123c2e43ab51b585829dc0c7cfc7ce668d89 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 05:32:05 +0200 Subject: [PATCH 098/170] Add libphp.so to Docker build process for CentOS and Ubuntu workflows --- .github/workflows/Dockerfile.centos-php-test-zts | 1 + .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 + .github/workflows/build-centos-php-test-images-zts.yml | 10 ++++++---- .github/workflows/build-ubuntu-php-test-images-zts.yml | 10 ++++++---- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git 
a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 8f1c9c1f8..2283064ea 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -100,6 +100,7 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ + cp /tmp/frankenphp-binary/libphp.so /usr/local/lib/libphp.so && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 68290252e..1619fbf33 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -95,6 +95,7 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ + cp /tmp/frankenphp-binary/libphp.so /usr/local/lib/libphp.so && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 180782859..496bb8d2f 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -38,10 +38,11 @@ jobs: docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp - file frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so + file frankenphp-binary/frankenphp frankenphp-binary/libphp.so - name: Build & push (amd64) uses: docker/build-push-action@v6 @@ -81,10 +82,11 @@ jobs: docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp - file frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so + file frankenphp-binary/frankenphp frankenphp-binary/libphp.so - name: Build & push (arm64) uses: docker/build-push-action@v6 diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index 7e014349c..e53013419 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -37,10 +37,11 @@ jobs: docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker 
cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp - file frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so + file frankenphp-binary/frankenphp frankenphp-binary/libphp.so - uses: docker/build-push-action@v6 with: @@ -78,10 +79,11 @@ jobs: docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp - file frankenphp-binary/frankenphp + ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so + file frankenphp-binary/frankenphp frankenphp-binary/libphp.so - uses: docker/build-push-action@v6 with: From 1d453107978d7f68a1310d62e24999e70ca45c06 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 05:47:34 +0200 Subject: [PATCH 099/170] . --- .github/workflows/Dockerfile.centos-php-test-zts | 3 ++- .github/workflows/Dockerfile.ubuntu-php-test-zts | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 2283064ea..5e1f0421c 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -104,7 +104,8 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ + && mkdir -p /usr/lib/frankenphp/modules \ + && ldconfig; \ fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 1619fbf33..c286140d9 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -99,7 +99,8 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules; \ + && mkdir -p /usr/lib/frankenphp/modules \ + && ldconfig; \ fi From 6bdec3c2232d7daa6065b88bea64018119d3c7d6 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 06:09:02 +0200 Subject: [PATCH 100/170] . 
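The two changes above ship libphp.so next to the frankenphp binary and refresh the linker cache with ldconfig, which suggests the binary from the official image links PHP dynamically rather than statically. A quick post-install check, assuming ldd is available in the final image:

  ls -lh /usr/local/lib/libphp.so
  ldconfig                                  # refresh the cache after copying the library
  # If frankenphp links libphp dynamically, ldd should resolve it; otherwise startup will fail.
  ldd /usr/local/bin/frankenphp | grep libphp || echo "libphp.so not resolved by the dynamic linker"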
--- .github/workflows/Dockerfile.centos-php-test-zts | 1 + .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 5e1f0421c..421efffd4 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -101,6 +101,7 @@ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ cp /tmp/frankenphp-binary/libphp.so /usr/local/lib/libphp.so && \ + ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index c286140d9..63ba3725c 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -96,6 +96,7 @@ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ cp /tmp/frankenphp-binary/libphp.so /usr/local/lib/libphp.so && \ + ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ From 47b488b50e0b530330eabc407f692c1b8ba87f4f Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 06:45:23 +0200 Subject: [PATCH 101/170] . --- .../workflows/Dockerfile.centos-php-test-zts | 4 ++-- .../workflows/Dockerfile.ubuntu-php-test-zts | 4 ++-- .../build-centos-php-test-images-zts.yml | 22 +++++++++---------- .../build-ubuntu-php-test-images-zts.yml | 22 +++++++++---------- 4 files changed, 24 insertions(+), 28 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 421efffd4..2012c4d68 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -100,8 +100,8 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - cp /tmp/frankenphp-binary/libphp.so /usr/local/lib/libphp.so && \ - ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so && \ + [ -d /tmp/frankenphp-binary/lib ] && cp -r /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ + [ -f /usr/local/lib/libphp.so ] && ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so || true && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 63ba3725c..83eb3e0a3 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -95,8 +95,8 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - cp /tmp/frankenphp-binary/libphp.so /usr/local/lib/libphp.so && \ - ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so && \ + [ -d 
/tmp/frankenphp-binary/lib ] && cp -r /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ + [ -f /usr/local/lib/libphp.so ] && ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so || true && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 496bb8d2f..f434795d0 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -34,15 +34,14 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/amd64 dunglas/frankenphp:8.4 + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:8.4 docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so + docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so - file frankenphp-binary/frankenphp frankenphp-binary/libphp.so + ls -lh frankenphp-binary/ || true + ls -lh frankenphp-binary/lib/ || true - name: Build & push (amd64) uses: docker/build-push-action@v6 @@ -78,15 +77,14 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/arm64 dunglas/frankenphp:8.4 + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:8.4 docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so + docker cp temp-frankenphp:/usr/local/lib/. 
frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so - file frankenphp-binary/frankenphp frankenphp-binary/libphp.so + ls -lh frankenphp-binary/ || true + ls -lh frankenphp-binary/lib/ || true - name: Build & push (arm64) uses: docker/build-push-action@v6 diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index e53013419..efb808add 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -33,15 +33,14 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/amd64 dunglas/frankenphp:8.4 + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:8.4 docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so + docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so - file frankenphp-binary/frankenphp frankenphp-binary/libphp.so + ls -lh frankenphp-binary/ || true + ls -lh frankenphp-binary/lib/ || true - uses: docker/build-push-action@v6 with: @@ -75,15 +74,14 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/arm64 dunglas/frankenphp:8.4 + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:8.4 docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/libphp.so + docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - ls -lh frankenphp-binary/frankenphp frankenphp-binary/libphp.so - file frankenphp-binary/frankenphp frankenphp-binary/libphp.so + ls -lh frankenphp-binary/ || true + ls -lh frankenphp-binary/lib/ || true - uses: docker/build-push-action@v6 with: From 6c0cbfd5c98b16cc0ca21e668325eb76b068418e Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 06:48:12 +0200 Subject: [PATCH 102/170] . 
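The `[ -d ... ] && cp ... || true` and `[ -f ... ] && ln -sf ... || true` chains introduced above keep the FrankenPHP steps optional inside a single RUN. A longer but arguably clearer form of the same intent (note the chains also swallow a failed cp, which the if form below would not):

  if [ -d /tmp/frankenphp-binary/lib ]; then
    cp -r /tmp/frankenphp-binary/lib/* /usr/local/lib/
  fi
  if [ -f /usr/local/lib/libphp.so ]; then
    ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so
  fi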
--- .github/workflows/build-centos-php-test-images-zts.yml | 10 ++++++---- .github/workflows/build-ubuntu-php-test-images-zts.yml | 10 ++++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index f434795d0..e0812ef9b 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -34,8 +34,9 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - docker pull --platform linux/amd64 dunglas/frankenphp:8.4 - docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:8.4 + PHP_VERSION="${{ matrix.php_version }}" + docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp @@ -77,8 +78,9 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - docker pull --platform linux/arm64 dunglas/frankenphp:8.4 - docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:8.4 + PHP_VERSION="${{ matrix.php_version }}" + docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index efb808add..b1df49587 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -33,8 +33,9 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - docker pull --platform linux/amd64 dunglas/frankenphp:8.4 - docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:8.4 + PHP_VERSION="${{ matrix.php_version }}" + docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp @@ -74,8 +75,9 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - docker pull --platform linux/arm64 dunglas/frankenphp:8.4 - docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:8.4 + PHP_VERSION="${{ matrix.php_version }}" + docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp From a8e95a36166d85159e235fb79b06468820c06717 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 06:56:21 +0200 Subject: [PATCH 103/170] . 
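Make sure the dynamic loader can find the libphp.so copied into /usr/local/lib: register the directory in ld.so.conf.d, rerun ldconfig, and extend LD_LIBRARY_PATH as a fallback. A quick manual check of the result (a sketch to run inside the built image, not part of the build itself):

    echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf
    ldconfig
    ldconfig -p | grep libphp || echo "libphp.so not in the loader cache"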
--- .github/workflows/Dockerfile.centos-php-test-zts | 7 +++++-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 7 +++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 2012c4d68..09a66819f 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -106,7 +106,9 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ - && ldconfig; \ + && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ + && ldconfig \ + && ls -lh /usr/local/lib/libphp.so || echo "WARNING: libphp.so not found in /usr/local/lib/"; \ fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ @@ -121,7 +123,8 @@ RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null -ENV PATH="/usr/local/bin:${PATH}" +ENV PATH="/usr/local/bin:${PATH}" \ + LD_LIBRARY_PATH="/usr/local/lib:${LD_LIBRARY_PATH:-}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 83eb3e0a3..9d95e4b55 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -101,7 +101,9 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ - && ldconfig; \ + && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ + && ldconfig \ + && ls -lh /usr/local/lib/libphp.so || echo "WARNING: libphp.so not found in /usr/local/lib/"; \ fi @@ -117,7 +119,8 @@ RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null -ENV PATH="/usr/local/bin:${PATH}" +ENV PATH="/usr/local/bin:${PATH}" \ + LD_LIBRARY_PATH="/usr/local/lib:${LD_LIBRARY_PATH:-}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ From 342f060489b069a4cbb019ecd79cf2eca60e32ea Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 07:04:51 +0200 Subject: [PATCH 104/170] . 
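Copy the extracted libraries with cp -a so symlinks are preserved, and log which of libphp.so's dependencies resolve (the dependency these images keep probing for is libargon2). The diagnostic amounts to (a sketch, using the paths laid out in these Dockerfiles):

    cp -a /tmp/frankenphp-binary/lib/* /usr/local/lib/
    ldd /usr/local/lib/libphp.so | grep -E 'libargon2|not found' || true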
--- .github/workflows/Dockerfile.centos-php-test-zts | 5 +++-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 +++-- .github/workflows/build-centos-php-test-images-zts.yml | 5 ++++- .github/workflows/build-ubuntu-php-test-images-zts.yml | 4 +++- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 09a66819f..36ee8a52a 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -100,7 +100,7 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - [ -d /tmp/frankenphp-binary/lib ] && cp -r /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ + [ -d /tmp/frankenphp-binary/lib ] && cp -a /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ [ -f /usr/local/lib/libphp.so ] && ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so || true && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ @@ -108,7 +108,8 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ && mkdir -p /usr/lib/frankenphp/modules \ && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ && ldconfig \ - && ls -lh /usr/local/lib/libphp.so || echo "WARNING: libphp.so not found in /usr/local/lib/"; \ + && echo "Copied libraries:" && ls -lh /usr/local/lib/*.so* 2>/dev/null | head -20 || true \ + && echo "Checking libphp.so dependencies:" && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "libargon2|not found" || true; \ fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 9d95e4b55..a29e5828d 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -95,7 +95,7 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - [ -d /tmp/frankenphp-binary/lib ] && cp -r /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ + [ -d /tmp/frankenphp-binary/lib ] && cp -a /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ [ -f /usr/local/lib/libphp.so ] && ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so || true && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ @@ -103,7 +103,8 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ && mkdir -p /usr/lib/frankenphp/modules \ && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ && ldconfig \ - && ls -lh /usr/local/lib/libphp.so || echo "WARNING: libphp.so not found in /usr/local/lib/"; \ + && echo "Copied libraries:" && ls -lh /usr/local/lib/*.so* 2>/dev/null | head -20 || true \ + && echo "Checking libphp.so dependencies:" && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "libargon2|not found" || true; \ fi diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index e0812ef9b..09fcf7000 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -34,6 +34,7 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: 
matrix.php_version >= '8.2' run: | + PHP_VERSION="${{ matrix.php_version }}" PHP_VERSION="${{ matrix.php_version }}" docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} @@ -41,8 +42,10 @@ jobs: docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp + echo "Extracted files:" ls -lh frankenphp-binary/ || true - ls -lh frankenphp-binary/lib/ || true + echo "Extracted libraries (looking for libargon2 and libphp):" + find frankenphp-binary/lib -name "*argon2*" -o -name "*libphp*" 2>/dev/null | head -10 || true - name: Build & push (amd64) uses: docker/build-push-action@v6 diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index b1df49587..6a0feab4a 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -40,8 +40,10 @@ jobs: docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp + echo "Extracted files:" ls -lh frankenphp-binary/ || true - ls -lh frankenphp-binary/lib/ || true + echo "Extracted libraries (looking for libargon2 and libphp):" + find frankenphp-binary/lib -name "*argon2*" -o -name "*libphp*" 2>/dev/null | head -10 || true - uses: docker/build-push-action@v6 with: From 7b2b7daeb61f6052d17ac492e119643c907a11a9 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 07:16:17 +0200 Subject: [PATCH 105/170] . --- .github/workflows/build-centos-php-test-images-zts.yml | 2 +- .github/workflows/build-ubuntu-php-test-images-zts.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 09fcf7000..885523cc7 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -34,12 +34,12 @@ jobs: - name: Extract FrankenPHP binary from Docker image if: matrix.php_version >= '8.2' run: | - PHP_VERSION="${{ matrix.php_version }}" PHP_VERSION="${{ matrix.php_version }}" docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ + docker cp temp-frankenphp:/usr/lib/. frankenphp-binary/lib/ || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp echo "Extracted files:" diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index 6a0feab4a..c247ada22 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -38,6 +38,7 @@ jobs: docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ + docker cp temp-frankenphp:/usr/lib/. 
frankenphp-binary/lib/ || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp echo "Extracted files:" From 8b5e00ad67aad8881f77ee19c80fcf115ec22910 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 07:26:14 +0200 Subject: [PATCH 106/170] Update Docker image references to use 'bookworm' tag for CentOS and Ubuntu workflows --- .github/workflows/build-centos-php-test-images-zts.yml | 4 ++-- .github/workflows/build-ubuntu-php-test-images-zts.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 885523cc7..1ebbe0455 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -35,8 +35,8 @@ jobs: if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION}-bookworm + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker cp temp-frankenphp:/usr/lib/. frankenphp-binary/lib/ || true diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index c247ada22..dc099dc94 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -34,8 +34,8 @@ jobs: if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION}-bookworm + docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker cp temp-frankenphp:/usr/lib/. frankenphp-binary/lib/ || true From 795b41d2bc49097a591c10622070d701da1acb4e Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 07:36:05 +0200 Subject: [PATCH 107/170] . 
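Stop copying every shared object out of the FrankenPHP image: install libargon2 from the distro packages instead (libargon2 on CentOS, libargon2-1 on Ubuntu), extract only the frankenphp binary in the workflows, and temporarily comment out the registry build cache while the images are reworked. Whether the runtime dependencies still resolve can be checked as before (a sketch of the check kept in the Dockerfiles):

    ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E 'argon2|not found' || true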
--- .../workflows/Dockerfile.centos-php-test-zts | 7 +++---- .../workflows/Dockerfile.ubuntu-php-test-zts | 7 +++---- .../build-centos-php-test-images-nts.yml | 4 ++-- .../build-centos-php-test-images-zts.yml | 21 +++++++------------ .../build-ubuntu-php-test-images-nts.yml | 8 +++---- .../build-ubuntu-php-test-images-zts.yml | 21 +++++++------------ 6 files changed, 28 insertions(+), 40 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 36ee8a52a..de7ac696d 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -39,6 +39,7 @@ RUN yum install -y autoconf bison pkgconfig \ git wget \ python3 python3-devel python3-pip \ nginx httpd httpd-devel procps-ng mysql-server \ + libargon2 \ && yum clean all # Install mariadb-devel separately (may need different repo or skip if not critical) @@ -100,16 +101,14 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - [ -d /tmp/frankenphp-binary/lib ] && cp -a /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ - [ -f /usr/local/lib/libphp.so ] && ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so || true && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ && ldconfig \ - && echo "Copied libraries:" && ls -lh /usr/local/lib/*.so* 2>/dev/null | head -20 || true \ - && echo "Checking libphp.so dependencies:" && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "libargon2|not found" || true; \ + && echo "FrankenPHP installed. 
Checking libphp.so dependencies:" \ + && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "argon2|not found" || true; \ fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index a29e5828d..85ba42758 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -26,6 +26,7 @@ RUN apt-get update && \ libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + libargon2-1 \ && rm -rf /var/lib/apt/lists/* # Timezone to UTC @@ -95,16 +96,14 @@ COPY --from=php-build /usr/local /usr/local COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - [ -d /tmp/frankenphp-binary/lib ] && cp -a /tmp/frankenphp-binary/lib/* /usr/local/lib/ || true && \ - [ -f /usr/local/lib/libphp.so ] && ln -sf /usr/local/lib/libphp.so /usr/local/bin/libphp.so || true && \ chmod +x /usr/local/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ && ldconfig \ - && echo "Copied libraries:" && ls -lh /usr/local/lib/*.so* 2>/dev/null | head -20 || true \ - && echo "Checking libphp.so dependencies:" && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "libargon2|not found" || true; \ + && echo "FrankenPHP installed. Checking libphp.so dependencies:" \ + && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "argon2|not found" || true; \ fi diff --git a/.github/workflows/build-centos-php-test-images-nts.yml b/.github/workflows/build-centos-php-test-images-nts.yml index 5ac637261..6546ac349 100644 --- a/.github/workflows/build-centos-php-test-images-nts.yml +++ b/.github/workflows/build-centos-php-test-images-nts.yml @@ -38,8 +38,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max build-arm64: runs-on: ubuntu-24.04-arm diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 1ebbe0455..b6beceb65 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -38,14 +38,10 @@ jobs: docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ - docker cp temp-frankenphp:/usr/lib/. 
frankenphp-binary/lib/ || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - echo "Extracted files:" + echo "Extracted FrankenPHP binary:" ls -lh frankenphp-binary/ || true - echo "Extracted libraries (looking for libargon2 and libphp):" - find frankenphp-binary/lib -name "*argon2*" -o -name "*libphp*" 2>/dev/null | head -10 || true - name: Build & push (amd64) uses: docker/build-push-action@v6 @@ -57,8 +53,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max build-arm64: runs-on: ubuntu-24.04-arm @@ -82,14 +78,13 @@ jobs: if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION}-bookworm + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp + echo "Extracted FrankenPHP binary:" ls -lh frankenphp-binary/ || true - ls -lh frankenphp-binary/lib/ || true - name: Build & push (arm64) uses: docker/build-push-action@v6 @@ -101,8 +96,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} - cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} - cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max + #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} + #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max publish-manifests: runs-on: ubuntu-24.04 diff --git a/.github/workflows/build-ubuntu-php-test-images-nts.yml b/.github/workflows/build-ubuntu-php-test-images-nts.yml index 0e6cd4ebe..c218942a4 100644 --- a/.github/workflows/build-ubuntu-php-test-images-nts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-nts.yml @@ -36,8 +36,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + #cache-from: type=registry,ref=${{ 
env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max build-arm64: runs-on: ubuntu-24.04-arm @@ -62,8 +62,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} - cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} - cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max + #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} + #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max publish-manifests: runs-on: ubuntu-24.04 diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index dc099dc94..0f2252892 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -37,14 +37,10 @@ jobs: docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/. frankenphp-binary/lib/ - docker cp temp-frankenphp:/usr/lib/. frankenphp-binary/lib/ || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - echo "Extracted files:" + echo "Extracted FrankenPHP binary:" ls -lh frankenphp-binary/ || true - echo "Extracted libraries (looking for libargon2 and libphp):" - find frankenphp-binary/lib -name "*argon2*" -o -name "*libphp*" 2>/dev/null | head -10 || true - uses: docker/build-push-action@v6 with: @@ -55,8 +51,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} - cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max + #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }} + #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-amd64-${{ env.VERSION }},mode=max build-arm64: runs-on: ubuntu-24.04-arm @@ -79,14 +75,13 @@ jobs: if: matrix.php_version >= '8.2' run: | PHP_VERSION="${{ matrix.php_version }}" - docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION} - docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION} + docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION}-bookworm + docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - docker cp temp-frankenphp:/usr/local/lib/. 
frankenphp-binary/lib/ docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp + echo "Extracted FrankenPHP binary:" ls -lh frankenphp-binary/ || true - ls -lh frankenphp-binary/lib/ || true - uses: docker/build-push-action@v6 with: @@ -97,8 +92,8 @@ jobs: build-args: | PHP_VERSION=${{ matrix.php_version }} tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ matrix.php_version }}-arm64-${{ env.VERSION }} - cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} - cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max + #cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }} + #cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:cache-${{ matrix.php_version }}-arm64-${{ env.VERSION }},mode=max publish-manifests: runs-on: ubuntu-24.04 From 998c04939679a702f72db96aba02f3e9a7b020d1 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 07:59:11 +0200 Subject: [PATCH 108/170] . --- .github/workflows/Dockerfile.centos-php-test-zts | 5 +++-- .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 +++-- .../workflows/build-centos-php-test-images-zts.yml | 14 ++++++++++---- .../workflows/build-ubuntu-php-test-images-zts.yml | 14 ++++++++++---- 4 files changed, 26 insertions(+), 12 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index de7ac696d..e1d5f90a9 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -102,13 +102,14 @@ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ + [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ && ldconfig \ - && echo "FrankenPHP installed. Checking libphp.so dependencies:" \ - && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "argon2|not found" || true; \ + && echo "FrankenPHP installed. Checking dependencies:" \ + && ldd /usr/local/bin/frankenphp 2>/dev/null | grep -E "libphp|libargon2|not found" || true; \ fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 85ba42758..a1700d6d0 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -97,13 +97,14 @@ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ + [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ && ldconfig \ - && echo "FrankenPHP installed. 
Checking libphp.so dependencies:" \ - && ldd /usr/local/lib/libphp.so 2>/dev/null | grep -E "argon2|not found" || true; \ + && echo "FrankenPHP installed. Checking dependencies:" \ + && ldd /usr/local/bin/frankenphp 2>/dev/null | grep -E "libphp|libargon2|not found" || true; \ fi diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index b6beceb65..8f1181fcd 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -38,10 +38,13 @@ jobs: docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + mkdir -p frankenphp-binary/lib + docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - echo "Extracted FrankenPHP binary:" - ls -lh frankenphp-binary/ || true + echo "Extracted FrankenPHP files:" + ls -lh frankenphp-binary/ + ls -lh frankenphp-binary/lib/ || true - name: Build & push (amd64) uses: docker/build-push-action@v6 @@ -81,10 +84,13 @@ jobs: docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + mkdir -p frankenphp-binary/lib + docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - echo "Extracted FrankenPHP binary:" - ls -lh frankenphp-binary/ || true + echo "Extracted FrankenPHP files:" + ls -lh frankenphp-binary/ + ls -lh frankenphp-binary/lib/ || true - name: Build & push (arm64) uses: docker/build-push-action@v6 diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index 0f2252892..8c0a46cef 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -37,10 +37,13 @@ jobs: docker pull --platform linux/amd64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + mkdir -p frankenphp-binary/lib + docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - echo "Extracted FrankenPHP binary:" - ls -lh frankenphp-binary/ || true + echo "Extracted FrankenPHP files:" + ls -lh frankenphp-binary/ + ls -lh frankenphp-binary/lib/ || true - uses: docker/build-push-action@v6 with: @@ -78,10 +81,13 @@ jobs: docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp + mkdir -p frankenphp-binary/lib + docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp - echo "Extracted FrankenPHP binary:" - ls -lh frankenphp-binary/ || true + echo "Extracted FrankenPHP 
files:" + ls -lh frankenphp-binary/ + ls -lh frankenphp-binary/lib/ || true - uses: docker/build-push-action@v6 with: From 170f78f56c170cecc6b19c46525245b03e5e5945 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 08:05:06 +0200 Subject: [PATCH 109/170] . --- .github/workflows/Dockerfile.centos-php-test-zts | 2 +- .github/workflows/Dockerfile.ubuntu-php-test-zts | 2 +- .github/workflows/build-centos-php-test-images-zts.yml | 2 ++ .github/workflows/build-ubuntu-php-test-images-zts.yml | 2 ++ 4 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index e1d5f90a9..6e7e5e548 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -39,7 +39,6 @@ RUN yum install -y autoconf bison pkgconfig \ git wget \ python3 python3-devel python3-pip \ nginx httpd httpd-devel procps-ng mysql-server \ - libargon2 \ && yum clean all # Install mariadb-devel separately (may need different repo or skip if not critical) @@ -103,6 +102,7 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ + [ -f /tmp/frankenphp-binary/lib/libargon2.so.1 ] && cp /tmp/frankenphp-binary/lib/libargon2.so.1 /usr/local/lib/libargon2.so.1 || true && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index a1700d6d0..8b017c961 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -26,7 +26,6 @@ RUN apt-get update && \ libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ - libargon2-1 \ && rm -rf /var/lib/apt/lists/* # Timezone to UTC @@ -98,6 +97,7 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ + [ -f /tmp/frankenphp-binary/lib/libargon2.so.1 ] && cp /tmp/frankenphp-binary/lib/libargon2.so.1 /usr/local/lib/libargon2.so.1 || true && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ && mkdir -p /usr/lib/frankenphp/modules \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 8f1181fcd..118baa0fa 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -40,6 +40,7 @@ jobs: docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/lib/x86_64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp echo "Extracted FrankenPHP files:" @@ -86,6 +87,7 @@ jobs: docker cp 
temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/lib/aarch64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp echo "Extracted FrankenPHP files:" diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index 8c0a46cef..72fae3bb6 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -39,6 +39,7 @@ jobs: docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/lib/x86_64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp echo "Extracted FrankenPHP files:" @@ -83,6 +84,7 @@ jobs: docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/lib/aarch64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp echo "Extracted FrankenPHP files:" From 2cca1ce3b8a5ca3d4044a7f7038caf2e42df64be Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 08:44:47 +0200 Subject: [PATCH 110/170] . --- .github/workflows/Dockerfile.centos-php-test-zts | 1 + .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 + .github/workflows/build-centos-php-test-images-zts.yml | 2 ++ .github/workflows/build-ubuntu-php-test-images-zts.yml | 2 ++ 4 files changed, 6 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 6e7e5e548..ab6316892 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -102,6 +102,7 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ + [ -f /tmp/frankenphp-binary/lib/libwatcher-c.so.0 ] && cp /tmp/frankenphp-binary/lib/libwatcher-c.so.0 /usr/local/lib/libwatcher-c.so.0 || true && \ [ -f /tmp/frankenphp-binary/lib/libargon2.so.1 ] && cp /tmp/frankenphp-binary/lib/libargon2.so.1 /usr/local/lib/libargon2.so.1 || true && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 8b017c961..58184285a 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -97,6 +97,7 @@ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ + [ -f /tmp/frankenphp-binary/lib/libwatcher-c.so.0 ] && cp 
/tmp/frankenphp-binary/lib/libwatcher-c.so.0 /usr/local/lib/libwatcher-c.so.0 || true && \ [ -f /tmp/frankenphp-binary/lib/libargon2.so.1 ] && cp /tmp/frankenphp-binary/lib/libargon2.so.1 /usr/local/lib/libargon2.so.1 || true && \ mkdir -p /etc/frankenphp/caddy.d \ && mkdir -p /etc/frankenphp/php.d \ diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index 118baa0fa..fa8692d8f 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -40,6 +40,7 @@ jobs: docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/usr/local/lib/libwatcher-c.so.0 frankenphp-binary/lib/libwatcher-c.so.0 || true docker cp temp-frankenphp:/lib/x86_64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp @@ -87,6 +88,7 @@ jobs: docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/usr/local/lib/libwatcher-c.so.0 frankenphp-binary/lib/libwatcher-c.so.0 || true docker cp temp-frankenphp:/lib/aarch64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index 72fae3bb6..e09d91230 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -39,6 +39,7 @@ jobs: docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/usr/local/lib/libwatcher-c.so.0 frankenphp-binary/lib/libwatcher-c.so.0 || true docker cp temp-frankenphp:/lib/x86_64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp @@ -84,6 +85,7 @@ jobs: docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + docker cp temp-frankenphp:/usr/local/lib/libwatcher-c.so.0 frankenphp-binary/lib/libwatcher-c.so.0 || true docker cp temp-frankenphp:/lib/aarch64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp chmod +x frankenphp-binary/frankenphp From 9167478af0982aed7a57da50aad30b3f1b460770 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 17:37:10 +0000 Subject: [PATCH 111/170] Build libwatcher for frankenphp + binaries from the official images --- .../workflows/Dockerfile.centos-php-test-zts | 36 +++++++++++-------- .../workflows/Dockerfile.ubuntu-php-test-zts | 29 ++++++++------- .../build-centos-php-test-images-zts.yml | 11 +----- .../build-ubuntu-php-test-images-zts.yml | 9 ----- 4 files changed, 39 insertions(+), 46 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index ab6316892..5e30a3faa 100644 --- 
a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -19,7 +19,7 @@ RUN yum install -y yum-utils && \ dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true # Install minimal tools needed for re2c build (replace curl-minimal with full curl) -RUN yum install -y xz tar gcc gcc-c++ make +RUN yum install -y xz tar gcc gcc-c++ make cmake ENV RE2C_VERSION=3.1 RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ @@ -93,6 +93,19 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true +WORKDIR /usr/src +RUN git clone https://github.com/e-dant/watcher.git +WORKDIR /usr/src/watcher +RUN cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DBUILD_LIB=ON \ + -DBUILD_BIN=ON \ + -DBUILD_HDR=ON && \ + cmake --build build && \ + cmake --install build && \ + ldconfig + FROM base AS final COPY --from=php-build /usr/local /usr/local @@ -101,16 +114,9 @@ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ - [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ - [ -f /tmp/frankenphp-binary/lib/libwatcher-c.so.0 ] && cp /tmp/frankenphp-binary/lib/libwatcher-c.so.0 /usr/local/lib/libwatcher-c.so.0 || true && \ - [ -f /tmp/frankenphp-binary/lib/libargon2.so.1 ] && cp /tmp/frankenphp-binary/lib/libargon2.so.1 /usr/local/lib/libargon2.so.1 || true && \ - mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules \ - && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ - && ldconfig \ - && echo "FrankenPHP installed. 
Checking dependencies:" \ - && ldd /usr/local/bin/frankenphp 2>/dev/null | grep -E "libphp|libargon2|not found" || true; \ + mkdir -p /etc/frankenphp/caddy.d && \ + mkdir -p /etc/frankenphp/php.d && \ + mkdir -p /usr/lib/frankenphp/modules; \ fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ @@ -176,10 +182,10 @@ RUN mkdir -p /etc/php-fpm.d && \ RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini RUN if [ -f /usr/local/bin/frankenphp ]; then \ - frankenphp -v || echo "Warning: frankenphp version check failed"; \ - else \ - echo "WARNING: frankenphp binary not found; FrankenPHP-specific tests will be skipped."; \ - fi + frankenphp -v || echo "Warning: frankenphp version check failed"; \ + else \ + echo "WARNING: frankenphp binary not found; FrankenPHP-specific tests will be skipped."; \ + fi # Python deps used by test harness RUN python3 -m pip install --no-cache-dir --upgrade pip && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 58184285a..4df410a40 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -25,7 +25,7 @@ RUN apt-get update && \ build-essential autoconf bison re2c pkg-config \ libxml2-dev libsqlite3-dev libcurl4-openssl-dev libssl-dev \ libzip-dev libonig-dev libjpeg-dev libpng-dev libwebp-dev \ - libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev \ + libicu-dev libreadline-dev libxslt1-dev default-libmysqlclient-dev cmake \ && rm -rf /var/lib/apt/lists/* # Timezone to UTC @@ -76,7 +76,6 @@ RUN ./configure \ --enable-mbstring \ --enable-pcntl \ --enable-cgi \ - --enable-embed \ --with-extra-version="" \ --with-curl \ --with-mysqli \ @@ -87,6 +86,19 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true +WORKDIR /usr/src +RUN git clone https://github.com/e-dant/watcher.git +WORKDIR /usr/src/watcher +RUN cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DBUILD_LIB=ON \ + -DBUILD_BIN=ON \ + -DBUILD_HDR=ON && \ + cmake --build build && \ + cmake --install build && \ + ldconfig + # Final image with PHP and test infrastructure FROM base AS final @@ -96,16 +108,9 @@ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ chmod +x /usr/local/bin/frankenphp && \ - [ -f /tmp/frankenphp-binary/lib/libphp.so ] && cp /tmp/frankenphp-binary/lib/libphp.so /usr/local/lib/libphp.so || true && \ - [ -f /tmp/frankenphp-binary/lib/libwatcher-c.so.0 ] && cp /tmp/frankenphp-binary/lib/libwatcher-c.so.0 /usr/local/lib/libwatcher-c.so.0 || true && \ - [ -f /tmp/frankenphp-binary/lib/libargon2.so.1 ] && cp /tmp/frankenphp-binary/lib/libargon2.so.1 /usr/local/lib/libargon2.so.1 || true && \ - mkdir -p /etc/frankenphp/caddy.d \ - && mkdir -p /etc/frankenphp/php.d \ - && mkdir -p /usr/lib/frankenphp/modules \ - && echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf \ - && ldconfig \ - && echo "FrankenPHP installed. 
Checking dependencies:" \ - && ldd /usr/local/bin/frankenphp 2>/dev/null | grep -E "libphp|libargon2|not found" || true; \ + mkdir -p /etc/frankenphp/caddy.d && \ + mkdir -p /etc/frankenphp/php.d && \ + mkdir -p /usr/lib/frankenphp/modules; \ fi diff --git a/.github/workflows/build-centos-php-test-images-zts.yml b/.github/workflows/build-centos-php-test-images-zts.yml index fa8692d8f..0a1af9973 100644 --- a/.github/workflows/build-centos-php-test-images-zts.yml +++ b/.github/workflows/build-centos-php-test-images-zts.yml @@ -39,7 +39,7 @@ jobs: docker create --platform linux/amd64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp mkdir -p frankenphp-binary/lib - docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so + echo "NOTE: Not copying libphp.so for CentOS (GLIBC 2.34) - using compiled version" docker cp temp-frankenphp:/usr/local/lib/libwatcher-c.so.0 frankenphp-binary/lib/libwatcher-c.so.0 || true docker cp temp-frankenphp:/lib/x86_64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true docker rm temp-frankenphp @@ -86,15 +86,6 @@ jobs: docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - mkdir -p frankenphp-binary/lib - docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so - docker cp temp-frankenphp:/usr/local/lib/libwatcher-c.so.0 frankenphp-binary/lib/libwatcher-c.so.0 || true - docker cp temp-frankenphp:/lib/aarch64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true - docker rm temp-frankenphp - chmod +x frankenphp-binary/frankenphp - echo "Extracted FrankenPHP files:" - ls -lh frankenphp-binary/ - ls -lh frankenphp-binary/lib/ || true - name: Build & push (arm64) uses: docker/build-push-action@v6 diff --git a/.github/workflows/build-ubuntu-php-test-images-zts.yml b/.github/workflows/build-ubuntu-php-test-images-zts.yml index e09d91230..c95b5503b 100644 --- a/.github/workflows/build-ubuntu-php-test-images-zts.yml +++ b/.github/workflows/build-ubuntu-php-test-images-zts.yml @@ -83,15 +83,6 @@ jobs: docker pull --platform linux/arm64 dunglas/frankenphp:php${PHP_VERSION}-bookworm docker create --platform linux/arm64 --name temp-frankenphp dunglas/frankenphp:php${PHP_VERSION}-bookworm docker cp temp-frankenphp:/usr/local/bin/frankenphp frankenphp-binary/frankenphp - mkdir -p frankenphp-binary/lib - docker cp temp-frankenphp:/usr/local/lib/libphp.so frankenphp-binary/lib/libphp.so - docker cp temp-frankenphp:/usr/local/lib/libwatcher-c.so.0 frankenphp-binary/lib/libwatcher-c.so.0 || true - docker cp temp-frankenphp:/lib/aarch64-linux-gnu/libargon2.so.1 frankenphp-binary/lib/libargon2.so.1 || true - docker rm temp-frankenphp - chmod +x frankenphp-binary/frankenphp - echo "Extracted FrankenPHP files:" - ls -lh frankenphp-binary/ - ls -lh frankenphp-binary/lib/ || true - uses: docker/build-push-action@v6 with: From 324affd81bae2e9900cd70376b10083db022cca6 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Fri, 12 Dec 2025 17:39:27 +0000 Subject: [PATCH 112/170] Remove --enable-embed option from PHP configuration in CentOS Dockerfile --- .github/workflows/Dockerfile.centos-php-test-zts | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts 
b/.github/workflows/Dockerfile.centos-php-test-zts index 5e30a3faa..018e31aa1 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -82,7 +82,6 @@ RUN ./configure \ --enable-mbstring \ --enable-pcntl \ --enable-cgi \ - --enable-embed \ --with-extra-version="" \ --with-curl \ --with-mysqli \ From 00117776589eaac76b6ef3ebbae414b64735a4c2 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sat, 13 Dec 2025 03:11:54 +0000 Subject: [PATCH 113/170] Integrate static lexbor library into PHP builds for CentOS and Ubuntu Dockerfiles, ensuring proper linking and verification of embedded symbols. --- .../workflows/Dockerfile.centos-php-test-zts | 42 +++++++++++++++++-- .../workflows/Dockerfile.ubuntu-php-test-zts | 40 +++++++++++++++++- 2 files changed, 77 insertions(+), 5 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 018e31aa1..f2b21a868 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -44,6 +44,23 @@ RUN yum install -y autoconf bison pkgconfig \ # Install mariadb-devel separately (may need different repo or skip if not critical) RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" +# Build and install lexbor library (STATIC - embedded into libphp.so) +ENV LEXBOR_VERSION=2.4.0 +RUN git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ + cd /tmp/lexbor-src && \ + cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DLEXBOR_BUILD_SHARED=OFF \ + -DLEXBOR_BUILD_STATIC=ON \ + -DLEXBOR_INSTALL_HEADERS=ON && \ + cmake --build build && \ + cmake --install build && \ + echo "Lexbor STATIC library installed:" && \ + ls -la /usr/local/lib*/liblexbor* || true && \ + ls -la /usr/local/include/lexbor/ || true && \ + rm -rf /tmp/lexbor-src + # Fetch and build PHP from source with ZTS FROM base AS php-build ARG PHP_SRC_REF @@ -73,6 +90,16 @@ RUN mkdir -p /usr/local/etc/php/conf.d # Build PHP with ZTS enabled RUN ./configure \ +COPY --from=base /usr/local/include/lexbor /usr/local/include/lexbor +COPY --from=base /usr/local/lib /usr/local/lib +COPY --from=base /usr/local/lib64 /usr/local/lib64 + +# Build PHP with ZTS enabled and statically link lexbor +RUN export CFLAGS="-I/usr/local/include" && \ + export LDFLAGS="-L/usr/local/lib -L/usr/local/lib64" && \ + export LIBS="/usr/local/lib64/liblexbor_static.a" && \ + ls -lh /usr/local/lib*/liblexbor* && \ + ./configure \ --prefix=/usr/local \ --with-config-file-path=/usr/local/lib \ --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ @@ -82,6 +109,13 @@ RUN ./configure \ --enable-mbstring \ --enable-pcntl \ --enable-cgi \ + --enable-embed \ + --enable-dom \ + --enable-xml \ + --enable-simplexml \ + --enable-xmlreader \ + --enable-xmlwriter \ + --with-xsl \ --with-extra-version="" \ --with-curl \ --with-mysqli \ @@ -103,12 +137,14 @@ RUN cmake -S . 
-B build \ -DBUILD_HDR=ON && \ cmake --build build && \ cmake --install build && \ - ldconfig + ldconfig /usr/local/lib /usr/local/lib64 FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN ldconfig /usr/local/lib /usr/local/lib64 + COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ @@ -123,7 +159,7 @@ RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ echo "Error: Could not determine extension_dir"; \ exit 1; \ fi && \ - mkdir -p "$EXTENSION_DIR" \ + mkdir -p "$EXTENSION_DIR" && \ echo "Created extension_dir: $EXTENSION_DIR" # Verify ZTS is enabled @@ -131,7 +167,7 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" \ - LD_LIBRARY_PATH="/usr/local/lib:${LD_LIBRARY_PATH:-}" + LD_LIBRARY_PATH="/usr/local/lib:/usr/local/lib64:${LD_LIBRARY_PATH:-}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 4df410a40..7fb5fa61f 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -33,6 +33,22 @@ RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ echo "${TZ}" > /etc/timezone && \ dpkg-reconfigure -f noninteractive tzdata +ENV LEXBOR_VERSION=2.4.0 +RUN git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ + cd /tmp/lexbor-src && \ + cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DLEXBOR_BUILD_SHARED=OFF \ + -DLEXBOR_BUILD_STATIC=ON \ + -DLEXBOR_INSTALL_HEADERS=ON && \ + cmake --build build && \ + cmake --install build && \ + echo "Lexbor STATIC library installed:" && \ + ls -la /usr/local/lib*/liblexbor* || true && \ + ls -la /usr/local/include/lexbor/ || true && \ + rm -rf /tmp/lexbor-src + # Fetch and build PHP from source with ZTS FROM base AS php-build ARG PHP_SRC_REF @@ -65,8 +81,19 @@ RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ a2enmod mpm_prefork rewrite cgi cgid || true +# Copy lexbor static library and headers from base stage +COPY --from=base /usr/local/include/lexbor /usr/local/include/lexbor +COPY --from=base /usr/local/lib /usr/local/lib +COPY --from=base /usr/local/lib64 /usr/local/lib64 + +# Build PHP with ZTS enabled and statically link lexbor +RUN export CFLAGS="-I/usr/local/include" && \ + export LDFLAGS="-L/usr/local/lib -L/usr/local/lib64" && \ + export LIBS="/usr/local/lib64/liblexbor_static.a" && \ # Build PHP with ZTS enabled RUN ./configure \ + ls -lh /usr/local/lib*/liblexbor* && \ + ./configure \ --prefix=/usr/local \ --with-config-file-path=/usr/local/lib \ --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ @@ -76,6 +103,13 @@ RUN ./configure \ --enable-mbstring \ --enable-pcntl \ --enable-cgi \ + --enable-embed \ + --enable-dom \ + --enable-xml \ + --enable-simplexml \ + --enable-xmlreader \ + --enable-xmlwriter \ + --with-xsl \ --with-extra-version="" \ --with-curl \ --with-mysqli \ @@ -97,13 +131,15 @@ RUN cmake -S . 
-B build \ -DBUILD_HDR=ON && \ cmake --build build && \ cmake --install build && \ - ldconfig + ldconfig /usr/local/lib /usr/local/lib64 # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local +RUN ldconfig /usr/local/lib /usr/local/lib64 + COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ @@ -127,7 +163,7 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" && exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" \ - LD_LIBRARY_PATH="/usr/local/lib:${LD_LIBRARY_PATH:-}" + LD_LIBRARY_PATH="/usr/local/lib:/usr/local/lib64:${LD_LIBRARY_PATH:-}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ From 7255eb1d17bef7c8ed6b1b740dc6545fbd06f50d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sat, 13 Dec 2025 03:18:37 +0000 Subject: [PATCH 114/170] . --- .../workflows/Dockerfile.centos-php-test-zts | 10 ++++++-- .../workflows/Dockerfile.ubuntu-php-test-zts | 23 +++++++++++-------- 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index f2b21a868..9556287da 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -88,8 +88,7 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ RUN mkdir -p /usr/local/etc/php/conf.d -# Build PHP with ZTS enabled -RUN ./configure \ +# Copy lexbor static library and headers from base stage COPY --from=base /usr/local/include/lexbor /usr/local/include/lexbor COPY --from=base /usr/local/lib /usr/local/lib COPY --from=base /usr/local/lib64 /usr/local/lib64 @@ -98,6 +97,7 @@ COPY --from=base /usr/local/lib64 /usr/local/lib64 RUN export CFLAGS="-I/usr/local/include" && \ export LDFLAGS="-L/usr/local/lib -L/usr/local/lib64" && \ export LIBS="/usr/local/lib64/liblexbor_static.a" && \ + echo "Building PHP with static lexbor:" && \ ls -lh /usr/local/lib*/liblexbor* && \ ./configure \ --prefix=/usr/local \ @@ -126,6 +126,12 @@ RUN export CFLAGS="-I/usr/local/include" && \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true +# Verify lexbor symbols are embedded in libphp.so +RUN echo "Checking if libphp.so has EMBEDDED lexbor symbols (static linking):" && \ + nm /usr/local/lib/libphp.so | grep lxb_ | head -n 5 && \ + echo "✓ Lexbor symbols found - static linking successful!" 
|| \ + echo "✗ WARNING: No lexbor symbols found in libphp.so" + WORKDIR /usr/src RUN git clone https://github.com/e-dant/watcher.git WORKDIR /usr/src/watcher diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 7fb5fa61f..88b8640a0 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -33,6 +33,7 @@ RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ echo "${TZ}" > /etc/timezone && \ dpkg-reconfigure -f noninteractive tzdata +# Build and install lexbor library (STATIC - embedded into libphp.so) ENV LEXBOR_VERSION=2.4.0 RUN git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ cd /tmp/lexbor-src && \ @@ -84,15 +85,13 @@ RUN a2dismod mpm_event || true && \ # Copy lexbor static library and headers from base stage COPY --from=base /usr/local/include/lexbor /usr/local/include/lexbor COPY --from=base /usr/local/lib /usr/local/lib -COPY --from=base /usr/local/lib64 /usr/local/lib64 # Build PHP with ZTS enabled and statically link lexbor RUN export CFLAGS="-I/usr/local/include" && \ - export LDFLAGS="-L/usr/local/lib -L/usr/local/lib64" && \ - export LIBS="/usr/local/lib64/liblexbor_static.a" && \ -# Build PHP with ZTS enabled -RUN ./configure \ - ls -lh /usr/local/lib*/liblexbor* && \ + export LDFLAGS="-L/usr/local/lib" && \ + export LIBS="/usr/local/lib/liblexbor_static.a" && \ + echo "Building PHP with static lexbor:" && \ + ls -lh /usr/local/lib/liblexbor* && \ ./configure \ --prefix=/usr/local \ --with-config-file-path=/usr/local/lib \ @@ -120,6 +119,12 @@ RUN ./configure \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true +# Verify lexbor symbols are embedded in libphp.so +RUN echo "Checking if libphp.so has EMBEDDED lexbor symbols (static linking):" && \ + nm /usr/local/lib/libphp.so | grep lxb_ | head -n 5 && \ + echo "✓ Lexbor symbols found - static linking successful!" || \ + echo "✗ WARNING: No lexbor symbols found in libphp.so" + WORKDIR /usr/src RUN git clone https://github.com/e-dant/watcher.git WORKDIR /usr/src/watcher @@ -131,14 +136,14 @@ RUN cmake -S . -B build \ -DBUILD_HDR=ON && \ cmake --build build && \ cmake --install build && \ - ldconfig /usr/local/lib /usr/local/lib64 + ldconfig # Final image with PHP and test infrastructure FROM base AS final COPY --from=php-build /usr/local /usr/local -RUN ldconfig /usr/local/lib /usr/local/lib64 +RUN ldconfig COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ @@ -163,7 +168,7 @@ RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ php -m | grep -E 'curl|mysqli' >/dev/null ENV PATH="/usr/local/bin:${PATH}" \ - LD_LIBRARY_PATH="/usr/local/lib:/usr/local/lib64:${LD_LIBRARY_PATH:-}" + LD_LIBRARY_PATH="/usr/local/lib:${LD_LIBRARY_PATH:-}" RUN ln -sf /usr/local/bin/php /usr/bin/php && \ ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ From 2df2e2fdf40f104126b9a4ee7491b09e29382657 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sat, 13 Dec 2025 03:28:32 +0000 Subject: [PATCH 115/170] + --- .../workflows/Dockerfile.centos-php-test-zts | 73 ++++++++++++------- .../workflows/Dockerfile.ubuntu-php-test-zts | 73 ++++++++++++------- 2 files changed, 90 insertions(+), 56 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 9556287da..7f9e00214 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -44,22 +44,28 @@ RUN yum install -y autoconf bison pkgconfig \ # Install mariadb-devel separately (may need different repo or skip if not critical) RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" -# Build and install lexbor library (STATIC - embedded into libphp.so) +# Build and install lexbor library (STATIC - embedded into libphp.so) - only for PHP 8.4+ +ARG PHP_VERSION ENV LEXBOR_VERSION=2.4.0 -RUN git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ - cd /tmp/lexbor-src && \ - cmake -S . -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=/usr/local \ - -DLEXBOR_BUILD_SHARED=OFF \ - -DLEXBOR_BUILD_STATIC=ON \ - -DLEXBOR_INSTALL_HEADERS=ON && \ - cmake --build build && \ - cmake --install build && \ - echo "Lexbor STATIC library installed:" && \ - ls -la /usr/local/lib*/liblexbor* || true && \ - ls -la /usr/local/include/lexbor/ || true && \ - rm -rf /tmp/lexbor-src +RUN if [ "$(echo "${PHP_VERSION}" | cut -d. -f1)" -ge 8 ] && [ "$(echo "${PHP_VERSION}" | cut -d. -f2)" -ge 4 ]; then \ + echo "Building lexbor for PHP ${PHP_VERSION} (8.4+)"; \ + git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ + cd /tmp/lexbor-src && \ + cmake -S . 
-B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DLEXBOR_BUILD_SHARED=OFF \ + -DLEXBOR_BUILD_STATIC=ON \ + -DLEXBOR_INSTALL_HEADERS=ON && \ + cmake --build build && \ + cmake --install build && \ + echo "Lexbor STATIC library installed:" && \ + ls -la /usr/local/lib*/liblexbor* || true && \ + ls -la /usr/local/include/lexbor/ || true && \ + rm -rf /tmp/lexbor-src; \ + else \ + echo "Skipping lexbor build for PHP ${PHP_VERSION} (< 8.4)"; \ + fi # Fetch and build PHP from source with ZTS FROM base AS php-build @@ -88,17 +94,23 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ RUN mkdir -p /usr/local/etc/php/conf.d -# Copy lexbor static library and headers from base stage -COPY --from=base /usr/local/include/lexbor /usr/local/include/lexbor +# Copy lexbor static library and headers from base stage (only if PHP 8.4+) +ARG PHP_VERSION +RUN if [ -d /tmp/dummy ]; then mkdir -p /usr/local/include/lexbor /usr/local/lib64; fi +COPY --from=base /usr/local/include/lexbor* /usr/local/include/ 2>/dev/null || true COPY --from=base /usr/local/lib /usr/local/lib COPY --from=base /usr/local/lib64 /usr/local/lib64 -# Build PHP with ZTS enabled and statically link lexbor -RUN export CFLAGS="-I/usr/local/include" && \ - export LDFLAGS="-L/usr/local/lib -L/usr/local/lib64" && \ - export LIBS="/usr/local/lib64/liblexbor_static.a" && \ - echo "Building PHP with static lexbor:" && \ - ls -lh /usr/local/lib*/liblexbor* && \ +# Build PHP with ZTS enabled and conditionally link lexbor for PHP 8.4+ +RUN if [ -f /usr/local/lib64/liblexbor_static.a ]; then \ + echo "Building PHP ${PHP_VERSION} with static lexbor (8.4+)"; \ + export CFLAGS="-I/usr/local/include"; \ + export LDFLAGS="-L/usr/local/lib -L/usr/local/lib64"; \ + export LIBS="/usr/local/lib64/liblexbor_static.a"; \ + ls -lh /usr/local/lib*/liblexbor* || true; \ + else \ + echo "Building PHP ${PHP_VERSION} without lexbor (< 8.4)"; \ + fi && \ ./configure \ --prefix=/usr/local \ --with-config-file-path=/usr/local/lib \ @@ -126,11 +138,16 @@ RUN export CFLAGS="-I/usr/local/include" && \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true -# Verify lexbor symbols are embedded in libphp.so -RUN echo "Checking if libphp.so has EMBEDDED lexbor symbols (static linking):" && \ - nm /usr/local/lib/libphp.so | grep lxb_ | head -n 5 && \ - echo "✓ Lexbor symbols found - static linking successful!" || \ - echo "✗ WARNING: No lexbor symbols found in libphp.so" +# Verify lexbor symbols are embedded in libphp.so (only for PHP 8.4+) +ARG PHP_VERSION +RUN if [ "$(echo "${PHP_VERSION}" | cut -d. -f1)" -ge 8 ] && [ "$(echo "${PHP_VERSION}" | cut -d. -f2)" -ge 4 ]; then \ + echo "Checking if libphp.so has EMBEDDED lexbor symbols (PHP ${PHP_VERSION}):" && \ + nm /usr/local/lib/libphp.so | grep lxb_ | head -n 5 && \ + echo "✓ Lexbor symbols found - static linking successful!" 
|| \ + echo "✗ WARNING: No lexbor symbols found in libphp.so"; \ + else \ + echo "Skipping lexbor verification for PHP ${PHP_VERSION} (< 8.4)"; \ + fi WORKDIR /usr/src RUN git clone https://github.com/e-dant/watcher.git diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 88b8640a0..9d5b85fce 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -33,22 +33,28 @@ RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ echo "${TZ}" > /etc/timezone && \ dpkg-reconfigure -f noninteractive tzdata -# Build and install lexbor library (STATIC - embedded into libphp.so) +# Build and install lexbor library (STATIC - embedded into libphp.so) - only for PHP 8.4+ +ARG PHP_VERSION ENV LEXBOR_VERSION=2.4.0 -RUN git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ - cd /tmp/lexbor-src && \ - cmake -S . -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=/usr/local \ - -DLEXBOR_BUILD_SHARED=OFF \ - -DLEXBOR_BUILD_STATIC=ON \ - -DLEXBOR_INSTALL_HEADERS=ON && \ - cmake --build build && \ - cmake --install build && \ - echo "Lexbor STATIC library installed:" && \ - ls -la /usr/local/lib*/liblexbor* || true && \ - ls -la /usr/local/include/lexbor/ || true && \ - rm -rf /tmp/lexbor-src +RUN if [ "$(echo "${PHP_VERSION}" | cut -d. -f1)" -ge 8 ] && [ "$(echo "${PHP_VERSION}" | cut -d. -f2)" -ge 4 ]; then \ + echo "Building lexbor for PHP ${PHP_VERSION} (8.4+)"; \ + git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ + cd /tmp/lexbor-src && \ + cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DLEXBOR_BUILD_SHARED=OFF \ + -DLEXBOR_BUILD_STATIC=ON \ + -DLEXBOR_INSTALL_HEADERS=ON && \ + cmake --build build && \ + cmake --install build && \ + echo "Lexbor STATIC library installed:" && \ + ls -la /usr/local/lib*/liblexbor* || true && \ + ls -la /usr/local/include/lexbor/ || true && \ + rm -rf /tmp/lexbor-src; \ + else \ + echo "Skipping lexbor build for PHP ${PHP_VERSION} (< 8.4)"; \ + fi # Fetch and build PHP from source with ZTS FROM base AS php-build @@ -82,16 +88,22 @@ RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ a2enmod mpm_prefork rewrite cgi cgid || true -# Copy lexbor static library and headers from base stage -COPY --from=base /usr/local/include/lexbor /usr/local/include/lexbor +# Copy lexbor static library and headers from base stage (only if PHP 8.4+) +ARG PHP_VERSION +RUN mkdir -p /usr/local/include /usr/local/lib +COPY --from=base /usr/local/include/lexbor* /usr/local/include/ 2>/dev/null || true COPY --from=base /usr/local/lib /usr/local/lib -# Build PHP with ZTS enabled and statically link lexbor -RUN export CFLAGS="-I/usr/local/include" && \ - export LDFLAGS="-L/usr/local/lib" && \ - export LIBS="/usr/local/lib/liblexbor_static.a" && \ - echo "Building PHP with static lexbor:" && \ - ls -lh /usr/local/lib/liblexbor* && \ +# Build PHP with ZTS enabled and conditionally link lexbor for PHP 8.4+ +RUN if [ -f /usr/local/lib/liblexbor_static.a ]; then \ + echo "Building PHP ${PHP_VERSION} with static lexbor (8.4+)"; \ + export CFLAGS="-I/usr/local/include"; \ + export LDFLAGS="-L/usr/local/lib"; \ + export LIBS="/usr/local/lib/liblexbor_static.a"; \ + ls -lh /usr/local/lib/liblexbor* || true; \ + else \ + echo "Building PHP ${PHP_VERSION} without lexbor (< 8.4)"; \ + fi && \ ./configure \ 
--prefix=/usr/local \ --with-config-file-path=/usr/local/lib \ @@ -119,11 +131,16 @@ RUN export CFLAGS="-I/usr/local/include" && \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true -# Verify lexbor symbols are embedded in libphp.so -RUN echo "Checking if libphp.so has EMBEDDED lexbor symbols (static linking):" && \ - nm /usr/local/lib/libphp.so | grep lxb_ | head -n 5 && \ - echo "✓ Lexbor symbols found - static linking successful!" || \ - echo "✗ WARNING: No lexbor symbols found in libphp.so" +# Verify lexbor symbols are embedded in libphp.so (only for PHP 8.4+) +ARG PHP_VERSION +RUN if [ "$(echo "${PHP_VERSION}" | cut -d. -f1)" -ge 8 ] && [ "$(echo "${PHP_VERSION}" | cut -d. -f2)" -ge 4 ]; then \ + echo "Checking if libphp.so has EMBEDDED lexbor symbols (PHP ${PHP_VERSION}):" && \ + nm /usr/local/lib/libphp.so | grep lxb_ | head -n 5 && \ + echo "✓ Lexbor symbols found - static linking successful!" || \ + echo "✗ WARNING: No lexbor symbols found in libphp.so"; \ + else \ + echo "Skipping lexbor verification for PHP ${PHP_VERSION} (< 8.4)"; \ + fi WORKDIR /usr/src RUN git clone https://github.com/e-dant/watcher.git From 56c6a788f87592e49d46e5678ea4f10aaf7c7502 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sat, 13 Dec 2025 03:31:36 +0000 Subject: [PATCH 116/170] . --- .github/workflows/Dockerfile.centos-php-test-zts | 5 ++--- .github/workflows/Dockerfile.ubuntu-php-test-zts | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 7f9e00214..41ea2fb24 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -94,10 +94,9 @@ RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_ RUN mkdir -p /usr/local/etc/php/conf.d -# Copy lexbor static library and headers from base stage (only if PHP 8.4+) +# Copy lexbor static library and headers from base stage (includes lexbor if PHP 8.4+) ARG PHP_VERSION -RUN if [ -d /tmp/dummy ]; then mkdir -p /usr/local/include/lexbor /usr/local/lib64; fi -COPY --from=base /usr/local/include/lexbor* /usr/local/include/ 2>/dev/null || true +COPY --from=base /usr/local/include /usr/local/include COPY --from=base /usr/local/lib /usr/local/lib COPY --from=base /usr/local/lib64 /usr/local/lib64 diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 9d5b85fce..a194be7e8 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -88,10 +88,9 @@ RUN a2dismod mpm_event || true && \ a2dismod mpm_worker || true && \ a2enmod mpm_prefork rewrite cgi cgid || true -# Copy lexbor static library and headers from base stage (only if PHP 8.4+) +# Copy lexbor static library and headers from base stage (includes lexbor if PHP 8.4+) ARG PHP_VERSION -RUN mkdir -p /usr/local/include /usr/local/lib -COPY --from=base /usr/local/include/lexbor* /usr/local/include/ 2>/dev/null || true +COPY --from=base /usr/local/include /usr/local/include COPY --from=base /usr/local/lib /usr/local/lib # Build PHP with ZTS enabled and conditionally link lexbor for PHP 8.4+ From 59c45d32df4f3529b63bf940db6575abf9f33940 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sat, 13 Dec 2025 03:58:03 +0000 Subject: [PATCH 117/170] . 
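Route the outbound blocked-domain check through the request-processor instance instead of global server state: OnPreOutgoingRequest now calls ssrf.IsBlockedOutboundDomainWithInst(inst, hostname), and IsBlockedOutboundDomain(hostname) remains as a thin wrapper that passes a nil instance, which resolves to no server data and therefore never blocks. Sketch of the resulting call pattern (reportAndBlock is a hypothetical stand-in for the existing report-and-block path):

    // Per-instance outbound check: an instance without server configuration
    // falls back to "allow" inside IsBlockedOutboundDomainWithInst.
    if !context.IsIpBypassed(inst) && ssrf.IsBlockedOutboundDomainWithInst(inst, hostname) {
        reportAndBlock(inst, hostname) // hypothetical: report to the agent, then block
    }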
--- lib/request-processor/handle_urls.go | 2 +- .../ssrf/checkBlockedDomain.go | 14 ++- .../ssrf/checkBlockedDomain_test.go | 92 +++++++++---------- 3 files changed, 57 insertions(+), 51 deletions(-) diff --git a/lib/request-processor/handle_urls.go b/lib/request-processor/handle_urls.go index 2c4a6ffc2..a11bef975 100644 --- a/lib/request-processor/handle_urls.go +++ b/lib/request-processor/handle_urls.go @@ -27,7 +27,7 @@ func OnPreOutgoingRequest(inst *instance.RequestProcessorInstance) string { operation := context.GetFunctionName(inst) // Check if the domain is blocked based on cloud configuration - if !context.IsIpBypassed(inst) && ssrf.IsBlockedOutboundDomain(hostname) { + if !context.IsIpBypassed(inst) && ssrf.IsBlockedOutboundDomainWithInst(inst, hostname) { server := inst.GetCurrentServer() // Blocked domains should also be reported to the agent. if server != nil { diff --git a/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain.go b/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain.go index 23018884c..e1ccbc970 100644 --- a/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain.go +++ b/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain.go @@ -1,14 +1,24 @@ package ssrf import ( - "main/globals" + . "main/aikido_types" "main/helpers" + "main/instance" ) // IsBlockedOutboundDomain checks if an outbound request to a hostname should be blocked // based on the cloud configuration for blocked/allowed domains func IsBlockedOutboundDomain(hostname string) bool { - server := globals.GetCurrentServer() + return IsBlockedOutboundDomainWithInst(nil, hostname) +} + +// IsBlockedOutboundDomainWithInst checks if an outbound request to a hostname should be blocked +// based on the cloud configuration for blocked/allowed domains +func IsBlockedOutboundDomainWithInst(inst *instance.RequestProcessorInstance, hostname string) bool { + var server *ServerData + if inst != nil { + server = inst.GetCurrentServer() + } if server == nil { return false } diff --git a/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go b/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go index 189503598..f217c89ce 100644 --- a/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go +++ b/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go @@ -3,14 +3,14 @@ package ssrf import ( "main/aikido_types" "main/context" - "main/globals" "main/helpers" + "main/instance" "testing" "go4.org/netipx" ) -func setupTestServerForBlockedDomains(blockNewOutgoingRequests bool, outboundDomains map[string]bool, bypassedIps *netipx.IPSet, requestIp string) func() { +func setupTestServerForBlockedDomains(blockNewOutgoingRequests bool, outboundDomains map[string]bool, bypassedIps *netipx.IPSet, requestIp string) (*instance.RequestProcessorInstance, func()) { // Normalize domain keys like config.go does at load time normalizedDomains := map[string]bool{} for domain, block := range outboundDomains { @@ -25,9 +25,9 @@ func setupTestServerForBlockedDomains(blockNewOutgoingRequests bool, outboundDom }, } - // Store original server and restore it later - originalServer := globals.GetCurrentServer() - globals.CurrentServer = server + // Create a test instance with the server + inst := instance.NewRequestProcessorInstance(0, false) + inst.SetCurrentServer(server) // Setup test context with request IP contextData := map[string]string{} @@ -36,10 +36,9 @@ func setupTestServerForBlockedDomains(blockNewOutgoingRequests bool, outboundDom } 
context.LoadForUnitTests(contextData) - // Return cleanup function - return func() { + // Return instance and cleanup function + return inst, func() { context.UnloadForUnitTests() - globals.CurrentServer = originalServer } } @@ -48,10 +47,10 @@ func TestIsBlockedOutboundDomain_ExplicitlyBlockedDomain(t *testing.T) { outboundDomains := map[string]bool{ "evil.com": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomain("evil.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "evil.com") if !isBlocked { t.Error("Expected blocked domain to be blocked, but it was allowed") @@ -65,10 +64,10 @@ func TestIsBlockedOutboundDomain_ExplicitlyBlockedDomainRegardlessOfFlag(t *test "evil.com": true, "safe.com": false, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomain("evil.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "evil.com") if !isBlocked { t.Error("Expected blocked domain to be blocked regardless of blockNewOutgoingRequests flag") @@ -80,10 +79,10 @@ func TestIsBlockedOutboundDomain_AllowedDomainWithBlockNewEnabled(t *testing.T) outboundDomains := map[string]bool{ "safe.com": false, } - cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomain("safe.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "safe.com") if isBlocked { t.Error("Expected allowed domain to be allowed when blockNewOutgoingRequests is true") @@ -95,10 +94,10 @@ func TestIsBlockedOutboundDomain_NewDomainBlockedWhenFlagEnabled(t *testing.T) { outboundDomains := map[string]bool{ "safe.com": false, } - cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomain("unknown.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "unknown.com") if !isBlocked { t.Error("Expected unknown domain to be blocked when blockNewOutgoingRequests is true") @@ -111,10 +110,10 @@ func TestIsBlockedOutboundDomain_NewDomainAllowedWhenFlagDisabled(t *testing.T) outboundDomains := map[string]bool{ "safe.com": false, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomain("unknown.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "unknown.com") if isBlocked { t.Error("Expected unknown domain to be allowed when blockNewOutgoingRequests is false") @@ -126,18 +125,18 @@ func TestIsBlockedOutboundDomain_CaseInsensitiveHostname(t *testing.T) { outboundDomains := map[string]bool{ "evil.com": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() // Test with uppercase hostname - isBlocked := IsBlockedOutboundDomain("EVIL.COM") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "EVIL.COM") if !isBlocked 
{ t.Error("Expected uppercase hostname to be blocked (case-insensitive matching)") } // Test with mixed case - isBlocked = IsBlockedOutboundDomain("Evil.Com") + isBlocked = IsBlockedOutboundDomainWithInst(inst, "Evil.Com") if !isBlocked { t.Error("Expected mixed case hostname to be blocked (case-insensitive matching)") @@ -145,14 +144,11 @@ func TestIsBlockedOutboundDomain_CaseInsensitiveHostname(t *testing.T) { } func TestIsBlockedOutboundDomain_NoServerReturnsNil(t *testing.T) { - // Test that function returns nil when there's no server - originalServer := globals.GetCurrentServer() - globals.CurrentServer = nil - defer func() { - globals.CurrentServer = originalServer - }() + // Test that function returns false when there's no server + inst := instance.NewRequestProcessorInstance(0, false) + // Don't set a server, so inst.GetCurrentServer() will return nil - isBlocked := IsBlockedOutboundDomain("evil.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "evil.com") if isBlocked { t.Error("Expected nil isBlocked when there's no server") @@ -162,10 +158,10 @@ func TestIsBlockedOutboundDomain_NoServerReturnsNil(t *testing.T) { func TestIsBlockedOutboundDomain_EmptyDomainsListWithBlockNewEnabled(t *testing.T) { // Test that all domains are blocked when the list is empty and blockNewOutgoingRequests is true outboundDomains := map[string]bool{} - cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomain("example.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "example.com") if !isBlocked { t.Error("Expected domain to be blocked when domains list is empty and blockNewOutgoingRequests is true") @@ -175,10 +171,10 @@ func TestIsBlockedOutboundDomain_EmptyDomainsListWithBlockNewEnabled(t *testing. 
func TestIsBlockedOutboundDomain_EmptyDomainsListWithBlockNewDisabled(t *testing.T) { // Test that all domains are allowed when the list is empty and blockNewOutgoingRequests is false outboundDomains := map[string]bool{} - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomain("example.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "example.com") if isBlocked { t.Error("Expected domain to be allowed when domains list is empty and blockNewOutgoingRequests is false") @@ -194,11 +190,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_BlockedUnicodeRequestedAsPunycod outboundDomains := map[string]bool{ "münchen.de": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() // Attacker tries to bypass by using Punycode encoding - isBlocked := IsBlockedOutboundDomain("xn--mnchen-3ya.de") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "xn--mnchen-3ya.de") if !isBlocked { t.Error("Expected Punycode hostname to be blocked when Unicode equivalent is in blocked list") @@ -211,11 +207,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_BlockedPunycodeRequestedAsUnicod outboundDomains := map[string]bool{ "xn--mnchen-3ya.de": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() // Attacker tries to bypass by using Unicode - isBlocked := IsBlockedOutboundDomain("münchen.de") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "münchen.de") if !isBlocked { t.Error("Expected Unicode hostname to be blocked when Punycode equivalent is in blocked list") @@ -227,11 +223,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_AllowedUnicodeRequestedAsPunycod outboundDomains := map[string]bool{ "münchen.de": false, } - cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() // Request using Punycode encoding - isBlocked := IsBlockedOutboundDomain("xn--mnchen-3ya.de") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "xn--mnchen-3ya.de") if isBlocked { t.Error("Expected Punycode hostname to be allowed when Unicode equivalent is in allow list") @@ -243,11 +239,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_AllowedPunycodeRequestedAsUnicod outboundDomains := map[string]bool{ "xn--mnchen-3ya.de": false, } - cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() // Request using Unicode - isBlocked := IsBlockedOutboundDomain("münchen.de") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "münchen.de") if isBlocked { t.Error("Expected Unicode hostname to be allowed when Punycode equivalent is in allow list") @@ -259,11 +255,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_MixedSubdomains(t *testing.T) { outboundDomains := map[string]bool{ "böse.evil.com": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() // Attacker tries with Punycode subdomain (xn--bse-sna = böse) - 
isBlocked := IsBlockedOutboundDomain("xn--bse-sna.evil.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "xn--bse-sna.evil.com") if !isBlocked { t.Error("Expected Punycode subdomain to be blocked when Unicode equivalent is in blocked list") @@ -275,11 +271,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_RussianDomain(t *testing.T) { outboundDomains := map[string]bool{ "москва.ru": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() // Attacker tries with Punycode - isBlocked := IsBlockedOutboundDomain("xn--80adxhks.ru") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "xn--80adxhks.ru") if !isBlocked { t.Error("Expected Punycode Cyrillic hostname to be blocked when Unicode equivalent is in blocked list") @@ -291,11 +287,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_ChineseDomain(t *testing.T) { outboundDomains := map[string]bool{ "中文.com": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() // Attacker tries with Punycode - isBlocked := IsBlockedOutboundDomain("xn--fiq228c.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "xn--fiq228c.com") if !isBlocked { t.Error("Expected Punycode Chinese hostname to be blocked when Unicode equivalent is in blocked list") @@ -307,11 +303,11 @@ func TestIsBlockedOutboundDomain_PunycodeBypass_WithPortStripped(t *testing.T) { outboundDomains := map[string]bool{ "münchen.de": true, } - cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") + inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() // Just the hostname without port - isBlocked := IsBlockedOutboundDomain("xn--mnchen-3ya.de") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "xn--mnchen-3ya.de") if !isBlocked { t.Error("Expected Punycode hostname to be blocked") From 0f9fd6980883232e3267ef4ec43fef16f5cb735f Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sat, 13 Dec 2025 04:01:10 +0000 Subject: [PATCH 118/170] Refactor test cases in checkBlockedDomain_test.go to remove unnecessary WithInst function calls, simplifying the domain checks for outbound blocking. 
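The rename in the previous commit left doubled call fragments in the tests, e.g. IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "evil.com"); collapse every call site back to the single-argument-list form:

    // Corrected call shape used throughout checkBlockedDomain_test.go:
    isBlocked := IsBlockedOutboundDomainWithInst(inst, "evil.com")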
--- .../ssrf/checkBlockedDomain_test.go | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go b/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go index f217c89ce..bf6ebbb68 100644 --- a/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go +++ b/lib/request-processor/vulnerabilities/ssrf/checkBlockedDomain_test.go @@ -50,7 +50,7 @@ func TestIsBlockedOutboundDomain_ExplicitlyBlockedDomain(t *testing.T) { inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "evil.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "evil.com") if !isBlocked { t.Error("Expected blocked domain to be blocked, but it was allowed") @@ -67,7 +67,7 @@ func TestIsBlockedOutboundDomain_ExplicitlyBlockedDomainRegardlessOfFlag(t *test inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "evil.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "evil.com") if !isBlocked { t.Error("Expected blocked domain to be blocked regardless of blockNewOutgoingRequests flag") @@ -82,7 +82,7 @@ func TestIsBlockedOutboundDomain_AllowedDomainWithBlockNewEnabled(t *testing.T) inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "safe.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "safe.com") if isBlocked { t.Error("Expected allowed domain to be allowed when blockNewOutgoingRequests is true") @@ -97,7 +97,7 @@ func TestIsBlockedOutboundDomain_NewDomainBlockedWhenFlagEnabled(t *testing.T) { inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "unknown.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "unknown.com") if !isBlocked { t.Error("Expected unknown domain to be blocked when blockNewOutgoingRequests is true") @@ -113,7 +113,7 @@ func TestIsBlockedOutboundDomain_NewDomainAllowedWhenFlagDisabled(t *testing.T) inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "unknown.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "unknown.com") if isBlocked { t.Error("Expected unknown domain to be allowed when blockNewOutgoingRequests is false") @@ -129,7 +129,7 @@ func TestIsBlockedOutboundDomain_CaseInsensitiveHostname(t *testing.T) { defer cleanup() // Test with uppercase hostname - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "EVIL.COM") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "EVIL.COM") if !isBlocked { t.Error("Expected uppercase hostname to be blocked (case-insensitive matching)") @@ -148,7 +148,7 @@ func TestIsBlockedOutboundDomain_NoServerReturnsNil(t *testing.T) { inst := instance.NewRequestProcessorInstance(0, false) // Don't set a server, so inst.GetCurrentServer() will return nil - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "evil.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "evil.com") if isBlocked { t.Error("Expected nil isBlocked when there's no server") @@ -161,7 +161,7 @@ func 
TestIsBlockedOutboundDomain_EmptyDomainsListWithBlockNewEnabled(t *testing. inst, cleanup := setupTestServerForBlockedDomains(true, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "example.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "example.com") if !isBlocked { t.Error("Expected domain to be blocked when domains list is empty and blockNewOutgoingRequests is true") @@ -174,7 +174,7 @@ func TestIsBlockedOutboundDomain_EmptyDomainsListWithBlockNewDisabled(t *testing inst, cleanup := setupTestServerForBlockedDomains(false, outboundDomains, nil, "") defer cleanup() - isBlocked := IsBlockedOutboundDomainWithInst(inst,WithInst(inst, "example.com") + isBlocked := IsBlockedOutboundDomainWithInst(inst, "example.com") if isBlocked { t.Error("Expected domain to be allowed when domains list is empty and blockNewOutgoingRequests is false") From 19515e1fd347153bbcff33efe4a51bacb1aab804 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sat, 13 Dec 2025 04:08:12 +0000 Subject: [PATCH 119/170] Remove unused urllib.request import and related code for checking FrankenPHP admin API readiness in frankenphp_worker. --- tools/server_tests/frankenphp_worker/main.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tools/server_tests/frankenphp_worker/main.py b/tools/server_tests/frankenphp_worker/main.py index bf0aae306..9d300c0b9 100644 --- a/tools/server_tests/frankenphp_worker/main.py +++ b/tools/server_tests/frankenphp_worker/main.py @@ -1,7 +1,6 @@ import os import subprocess import time -import urllib.request frankenphp_bin = "frankenphp" caddyfile_path = "/tmp/frankenphp_worker_test.caddyfile" @@ -146,15 +145,6 @@ def frankenphp_worker_pre_tests(tests_data): if not result.stdout.strip(): raise RuntimeError("FrankenPHP worker failed to start!") - for i in range(30): - try: - urllib.request.urlopen('http://localhost:2019/config', timeout=1) - break - except: - time.sleep(1) - else: - raise RuntimeError("FrankenPHP admin API not ready!") - print(f"FrankenPHP worker started with {threads} threads for {len(tests_data)} tests") def frankenphp_worker_start_server(test_data, test_lib_dir, valgrind): From d1cc20b936ec54f0537483b04ad050e82c4301ef Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 03:52:43 +0200 Subject: [PATCH 120/170] Add --disable-zend-signals option to PHP configuration in CentOS and Ubuntu Dockerfiles --- .github/workflows/Dockerfile.centos-php-test-zts | 1 + .github/workflows/Dockerfile.ubuntu-php-test-zts | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 41ea2fb24..b42b1f8f3 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -133,6 +133,7 @@ RUN if [ -f /usr/local/lib64/liblexbor_static.a ]; then \ --with-openssl \ --with-zlib \ --with-zip \ + --disable-zend-signals \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index a194be7e8..97c4bb14a 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -126,6 +126,7 @@ RUN if [ -f /usr/local/lib/liblexbor_static.a ]; then \ --with-openssl \ --with-zlib \ --with-zip \ + --disable-zend-signals \ && make -j"$(nproc)" \ && make install \ && strip 
/usr/local/bin/php /usr/local/sbin/php-fpm || true From 7d532f0f13c715c380cee646879bf8299ba02287 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 02:58:05 +0000 Subject: [PATCH 121/170] Add Dockerfile for CentOS Stream 9 with PHP 8.3 built from source in ZTS mode, including necessary build dependencies and configurations for PHP-FPM and testing environment. --- .devcontainer/centos_php_test_zts/Dockerfile | 198 ++++++++++++++++++ .../workflows/Dockerfile.centos-php-test-zts | 24 +-- 2 files changed, 199 insertions(+), 23 deletions(-) create mode 100644 .devcontainer/centos_php_test_zts/Dockerfile diff --git a/.devcontainer/centos_php_test_zts/Dockerfile b/.devcontainer/centos_php_test_zts/Dockerfile new file mode 100644 index 000000000..405055257 --- /dev/null +++ b/.devcontainer/centos_php_test_zts/Dockerfile @@ -0,0 +1,198 @@ +# syntax=docker/dockerfile:1.7 +# CentOS Stream 9 test image with PHP built from source in ZTS mode +# Used for testing the extension with FrankenPHP and other ZTS environments + +ARG BASE_IMAGE=quay.io/centos/centos:stream9 +ARG PHP_VERSION=8.3 +ARG PHP_SRC_REF=PHP-${PHP_VERSION} + +FROM ${BASE_IMAGE} AS base +SHELL ["/bin/bash", "-euo", "pipefail", "-c"] + +ARG PHP_VERSION +ENV TZ=Etc/UTC \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + PHP_VERSION=${PHP_VERSION} + +RUN yum install -y yum-utils && \ + dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true + +# Install minimal tools needed for re2c build (replace curl-minimal with full curl) +RUN yum install -y xz tar gcc gcc-c++ make cmake + +ENV RE2C_VERSION=3.1 +RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ + && mkdir -p /tmp/re2c-src \ + && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ + && cd /tmp/re2c-src \ + && ./configure \ + && make -j"$(nproc)" \ + && make install \ + && rm -rf /tmp/re2c-src /tmp/re2c.tar.xz + +# Install remaining build dependencies and tools +RUN yum install -y autoconf bison pkgconfig \ + libxml2-devel sqlite-devel libcurl-devel openssl-devel \ + libzip-devel oniguruma-devel libjpeg-turbo-devel libpng-devel libwebp-devel \ + libicu-devel readline-devel libxslt-devel \ + git wget \ + python3 python3-devel python3-pip \ + nginx httpd httpd-devel procps-ng mysql-server \ + && yum clean all + +# Install mariadb-devel separately (may need different repo or skip if not critical) +RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" + +# NOTE: Lexbor is BUNDLED in PHP source at ext/lexbor/ since PHP 8.4+ +# No external build needed - PHP compiles it from bundled sources automatically + +# Fetch and build PHP from source with ZTS +FROM base AS php-build +ARG PHP_SRC_REF +WORKDIR /usr/src +RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git +WORKDIR /usr/src/php-src + +RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac +RUN ./buildconf --force + +# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in newer OpenSSL) +RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ + awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ + indent = $0; \ + gsub(/[^[:space:]].*/, "", indent); \ + print indent "#ifdef RSA_SSLV23_PADDING"; \ + gsub(/^[[:space:]]*/, indent " "); \ + print; \ + print indent 
"#endif"; \ + next \ + } \ + { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ + mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ + fi || true + +RUN mkdir -p /usr/local/etc/php/conf.d + +# Build PHP with ZTS enabled (lexbor built from bundled sources in ext/lexbor/) +RUN ./configure \ + --prefix=/usr/local \ + --with-config-file-path=/usr/local/lib \ + --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ + --enable-zts \ + --enable-maintainer-zts \ + --enable-fpm \ + --enable-mbstring \ + --enable-pcntl \ + --enable-cgi \ + --enable-embed \ + --enable-dom \ + --enable-xml \ + --enable-simplexml \ + --enable-xmlreader \ + --enable-xmlwriter \ + --with-xsl \ + --with-extra-version="" \ + --with-curl \ + --with-mysqli \ + --with-openssl \ + --with-zlib \ + --with-zip \ + --disable-zend-signals \ +&& make -j"$(nproc)" \ +&& make install \ +&& strip /usr/local/bin/php /usr/local/sbin/php-fpm || true + +WORKDIR /usr/src +RUN git clone https://github.com/e-dant/watcher.git +WORKDIR /usr/src/watcher +RUN cmake -S . -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DBUILD_LIB=ON \ + -DBUILD_BIN=ON \ + -DBUILD_HDR=ON && \ + cmake --build build && \ + cmake --install build && \ + ldconfig /usr/local/lib /usr/local/lib64 + +FROM base AS final + +COPY --from=php-build /usr/local /usr/local + +RUN ldconfig /usr/local/lib /usr/local/lib64 + +# Create FrankenPHP directories (binary installed separately or not at all for devcontainer) +RUN mkdir -p /etc/frankenphp/caddy.d && \ + mkdir -p /etc/frankenphp/php.d && \ + mkdir -p /usr/lib/frankenphp/modules + +RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ + if [ -z "$EXTENSION_DIR" ]; then \ + echo "Error: Could not determine extension_dir"; \ + exit 1; \ + fi && \ + mkdir -p "$EXTENSION_DIR" && \ + echo "Created extension_dir: $EXTENSION_DIR" + +# Verify ZTS is enabled +RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ + php -m | grep -E 'curl|mysqli' >/dev/null + +ENV PATH="/usr/local/bin:${PATH}" \ + LD_LIBRARY_PATH="/usr/local/lib:/usr/local/lib64:${LD_LIBRARY_PATH:-}" + +RUN ln -sf /usr/local/bin/php /usr/bin/php && \ + ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ + ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi + +RUN mkdir -p /etc/php-fpm.d && \ + mkdir -p /run/php-fpm && \ + mkdir -p /var/run && \ + mkdir -p /var/log/php-fpm && \ + mkdir -p /etc/httpd || true && \ + mkdir -p /usr/local/etc/php-fpm.d && \ + mkdir -p /usr/local/etc/php/conf.d && \ + ln -sf /usr/local/etc/php/conf.d /etc/php.d || true && \ + + echo "[global]" > /usr/local/etc/php-fpm.conf && \ + echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ + echo "error_log = /var/log/php-fpm/error.log" >> /usr/local/etc/php-fpm.conf && \ + echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ + + echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ + echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ + echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ + + php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 | grep -v "Nothing matches the include pattern" || true && \ + php-fpm -t -y /usr/local/etc/php-fpm.conf >/dev/null 2>&1 || \ + (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ + PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ + echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && \ + echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && \ + echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ + exit 1) && \ + + ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ + ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true + +RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini + + +# Python deps used by test harness +RUN python3 -m pip install --no-cache-dir --upgrade pip && \ + python3 -m pip install --no-cache-dir flask requests psutil + +# Quality-of-life +WORKDIR /work +CMD ["bash"] diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index b42b1f8f3..404f466a2 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -44,29 +44,6 @@ RUN yum install -y autoconf bison pkgconfig \ # Install mariadb-devel separately (may need different repo or skip if not critical) RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" -# Build and install lexbor library (STATIC - embedded into libphp.so) - only for PHP 8.4+ -ARG PHP_VERSION -ENV LEXBOR_VERSION=2.4.0 -RUN if [ "$(echo "${PHP_VERSION}" | cut -d. 
-f1)" -ge 8 ] && [ "$(echo "${PHP_VERSION}" | cut -d. -f2)" -ge 4 ]; then \ - echo "Building lexbor for PHP ${PHP_VERSION} (8.4+)"; \ - git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ - cd /tmp/lexbor-src && \ - cmake -S . -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=/usr/local \ - -DLEXBOR_BUILD_SHARED=OFF \ - -DLEXBOR_BUILD_STATIC=ON \ - -DLEXBOR_INSTALL_HEADERS=ON && \ - cmake --build build && \ - cmake --install build && \ - echo "Lexbor STATIC library installed:" && \ - ls -la /usr/local/lib*/liblexbor* || true && \ - ls -la /usr/local/include/lexbor/ || true && \ - rm -rf /tmp/lexbor-src; \ - else \ - echo "Skipping lexbor build for PHP ${PHP_VERSION} (< 8.4)"; \ - fi - # Fetch and build PHP from source with ZTS FROM base AS php-build ARG PHP_SRC_REF @@ -134,6 +111,7 @@ RUN if [ -f /usr/local/lib64/liblexbor_static.a ]; then \ --with-zlib \ --with-zip \ --disable-zend-signals \ + --enable-zend-max-execution-timers \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true From f7f6ac395a5bd80d6b715a97dabe1c843fcb9db1 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 02:58:53 +0000 Subject: [PATCH 122/170] Remove lexbor library build from Dockerfile for Ubuntu PHP ZTS, and add --enable-zend-max-execution-timers option to PHP configuration. --- .../workflows/Dockerfile.ubuntu-php-test-zts | 24 +------------------ 1 file changed, 1 insertion(+), 23 deletions(-) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 97c4bb14a..c33c5a351 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -33,29 +33,6 @@ RUN ln -fs /usr/share/zoneinfo/${TZ} /etc/localtime && \ echo "${TZ}" > /etc/timezone && \ dpkg-reconfigure -f noninteractive tzdata -# Build and install lexbor library (STATIC - embedded into libphp.so) - only for PHP 8.4+ -ARG PHP_VERSION -ENV LEXBOR_VERSION=2.4.0 -RUN if [ "$(echo "${PHP_VERSION}" | cut -d. -f1)" -ge 8 ] && [ "$(echo "${PHP_VERSION}" | cut -d. -f2)" -ge 4 ]; then \ - echo "Building lexbor for PHP ${PHP_VERSION} (8.4+)"; \ - git clone --depth 1 --branch v${LEXBOR_VERSION} https://github.com/lexbor/lexbor.git /tmp/lexbor-src && \ - cd /tmp/lexbor-src && \ - cmake -S . -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=/usr/local \ - -DLEXBOR_BUILD_SHARED=OFF \ - -DLEXBOR_BUILD_STATIC=ON \ - -DLEXBOR_INSTALL_HEADERS=ON && \ - cmake --build build && \ - cmake --install build && \ - echo "Lexbor STATIC library installed:" && \ - ls -la /usr/local/lib*/liblexbor* || true && \ - ls -la /usr/local/include/lexbor/ || true && \ - rm -rf /tmp/lexbor-src; \ - else \ - echo "Skipping lexbor build for PHP ${PHP_VERSION} (< 8.4)"; \ - fi - # Fetch and build PHP from source with ZTS FROM base AS php-build ARG PHP_SRC_REF @@ -127,6 +104,7 @@ RUN if [ -f /usr/local/lib/liblexbor_static.a ]; then \ --with-zlib \ --with-zip \ --disable-zend-signals \ + --enable-zend-max-execution-timers \ && make -j"$(nproc)" \ && make install \ && strip /usr/local/bin/php /usr/local/sbin/php-fpm || true From 46f86bb05ba952e169883dae8ac8e80cf525ceb1 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 03:42:23 +0000 Subject: [PATCH 123/170] Refactor exception handling in Action::executeThrow to streamline response code and exception throwing order. 
--- lib/php-extension/Action.cpp | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index a356e3a2a..50941b05e 100644 --- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -4,17 +4,8 @@ ACTION_STATUS Action::executeThrow(json &event) { int _code = event["code"].get(); std::string _message = event["message"].get(); - const auto& sapiName = AIKIDO_GLOBAL(sapi_name); - - // For frankenphp, throw exception first; for others (cli-server, apache2handler, etc.), set response code first - if (sapiName == "frankenphp") { - zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); - CallPhpFunctionWithOneParam("http_response_code", _code); - - } else { - CallPhpFunctionWithOneParam("http_response_code", _code); - zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); - } + CallPhpFunctionWithOneParam("http_response_code", _code); + zend_throw_exception(GetFirewallDefaultExceptionCe(), _message.c_str(), _code); return BLOCK; } From beedab5f6ec21dd841ddb7af20dc5acf231fbb60 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 04:20:07 +0000 Subject: [PATCH 124/170] . --- .devcontainer/centos_php_test_zts/Dockerfile | 6 ++++-- .github/workflows/Dockerfile.centos-php-test-zts | 4 +++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.devcontainer/centos_php_test_zts/Dockerfile b/.devcontainer/centos_php_test_zts/Dockerfile index 405055257..89b086a74 100644 --- a/.devcontainer/centos_php_test_zts/Dockerfile +++ b/.devcontainer/centos_php_test_zts/Dockerfile @@ -3,7 +3,7 @@ # Used for testing the extension with FrankenPHP and other ZTS environments ARG BASE_IMAGE=quay.io/centos/centos:stream9 -ARG PHP_VERSION=8.3 +ARG PHP_VERSION=8.5 ARG PHP_SRC_REF=PHP-${PHP_VERSION} FROM ${BASE_IMAGE} AS base @@ -120,7 +120,9 @@ FROM base AS final COPY --from=php-build /usr/local /usr/local -RUN ldconfig /usr/local/lib /usr/local/lib64 +RUN echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf && \ + echo "/usr/local/lib64" >> /etc/ld.so.conf.d/usr-local-lib.conf && \ + ldconfig # Create FrankenPHP directories (binary installed separately or not at all for devcontainer) RUN mkdir -p /etc/frankenphp/caddy.d && \ diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 404f466a2..09400989c 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -144,7 +144,9 @@ FROM base AS final COPY --from=php-build /usr/local /usr/local -RUN ldconfig /usr/local/lib /usr/local/lib64 +RUN echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf && \ + echo "/usr/local/lib64" >> /etc/ld.so.conf.d/usr-local-lib.conf && \ + ldconfig COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ From 1faaeb15f97e0841e1b8523efc790017f6b2cd88 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 04:20:47 +0000 Subject: [PATCH 125/170] Update Dockerfile for Ubuntu PHP ZTS to configure library path for ldconfig --- .github/workflows/Dockerfile.ubuntu-php-test-zts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index c33c5a351..7864e4173 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ 
-138,7 +138,8 @@ FROM base AS final COPY --from=php-build /usr/local /usr/local -RUN ldconfig +RUN echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf && \ + ldconfig COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ From 97545146bcde232dcf751dec8a8f0b977cddeed4 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 15:11:41 +0200 Subject: [PATCH 126/170] Remove Dockerfile for CentOS PHP ZTS and update paths for FrankenPHP binary in Ubuntu and CentOS workflows to ensure consistent installation location. --- .devcontainer/centos_php_test_zts/Dockerfile | 200 ------------------ .../workflows/Dockerfile.centos-php-test-zts | 4 +- .../workflows/Dockerfile.ubuntu-php-test-zts | 12 +- tools/server_tests/frankenphp_classic/main.py | 26 ++- tools/server_tests/frankenphp_worker/main.py | 29 ++- 5 files changed, 45 insertions(+), 226 deletions(-) delete mode 100644 .devcontainer/centos_php_test_zts/Dockerfile diff --git a/.devcontainer/centos_php_test_zts/Dockerfile b/.devcontainer/centos_php_test_zts/Dockerfile deleted file mode 100644 index 89b086a74..000000000 --- a/.devcontainer/centos_php_test_zts/Dockerfile +++ /dev/null @@ -1,200 +0,0 @@ -# syntax=docker/dockerfile:1.7 -# CentOS Stream 9 test image with PHP built from source in ZTS mode -# Used for testing the extension with FrankenPHP and other ZTS environments - -ARG BASE_IMAGE=quay.io/centos/centos:stream9 -ARG PHP_VERSION=8.5 -ARG PHP_SRC_REF=PHP-${PHP_VERSION} - -FROM ${BASE_IMAGE} AS base -SHELL ["/bin/bash", "-euo", "pipefail", "-c"] - -ARG PHP_VERSION -ENV TZ=Etc/UTC \ - LC_ALL=C.UTF-8 \ - LANG=C.UTF-8 \ - PHP_VERSION=${PHP_VERSION} - -RUN yum install -y yum-utils && \ - dnf config-manager --set-enabled crb || dnf config-manager --set-enabled powertools || true - -# Install minimal tools needed for re2c build (replace curl-minimal with full curl) -RUN yum install -y xz tar gcc gcc-c++ make cmake - -ENV RE2C_VERSION=3.1 -RUN curl -fsSL -o /tmp/re2c.tar.xz https://github.com/skvadrik/re2c/releases/download/${RE2C_VERSION}/re2c-${RE2C_VERSION}.tar.xz \ - && mkdir -p /tmp/re2c-src \ - && tar -xJf /tmp/re2c.tar.xz -C /tmp/re2c-src --strip-components=1 \ - && cd /tmp/re2c-src \ - && ./configure \ - && make -j"$(nproc)" \ - && make install \ - && rm -rf /tmp/re2c-src /tmp/re2c.tar.xz - -# Install remaining build dependencies and tools -RUN yum install -y autoconf bison pkgconfig \ - libxml2-devel sqlite-devel libcurl-devel openssl-devel \ - libzip-devel oniguruma-devel libjpeg-turbo-devel libpng-devel libwebp-devel \ - libicu-devel readline-devel libxslt-devel \ - git wget \ - python3 python3-devel python3-pip \ - nginx httpd httpd-devel procps-ng mysql-server \ - && yum clean all - -# Install mariadb-devel separately (may need different repo or skip if not critical) -RUN yum install -y mariadb-devel || yum install -y mariadb-connector-c-devel || echo "Warning: mariadb-devel not available, continuing without it" - -# NOTE: Lexbor is BUNDLED in PHP source at ext/lexbor/ since PHP 8.4+ -# No external build needed - PHP compiles it from bundled sources automatically - -# Fetch and build PHP from source with ZTS -FROM base AS php-build -ARG PHP_SRC_REF -WORKDIR /usr/src -RUN git clone --depth 1 --branch "${PHP_SRC_REF}" https://github.com/php/php-src.git -WORKDIR /usr/src/php-src - -RUN sed -i 's/\[\([0-9]\+\.[0-9]\+\.[0-9]\+\)-dev\]/[\1]/' configure.ac -RUN ./buildconf --force - -# Patch openssl.c for OpenSSL compatibility (RSA_SSLV23_PADDING may not be available in 
newer OpenSSL) -RUN if [ -f ext/openssl/openssl.c ] && grep -q 'REGISTER_LONG_CONSTANT("OPENSSL_SSLV23_PADDING"' ext/openssl/openssl.c; then \ - awk '/REGISTER_LONG_CONSTANT\("OPENSSL_SSLV23_PADDING"/ { \ - indent = $0; \ - gsub(/[^[:space:]].*/, "", indent); \ - print indent "#ifdef RSA_SSLV23_PADDING"; \ - gsub(/^[[:space:]]*/, indent " "); \ - print; \ - print indent "#endif"; \ - next \ - } \ - { print }' ext/openssl/openssl.c > ext/openssl/openssl.c.new && \ - mv ext/openssl/openssl.c.new ext/openssl/openssl.c; \ - fi || true - -RUN mkdir -p /usr/local/etc/php/conf.d - -# Build PHP with ZTS enabled (lexbor built from bundled sources in ext/lexbor/) -RUN ./configure \ - --prefix=/usr/local \ - --with-config-file-path=/usr/local/lib \ - --with-config-file-scan-dir=/usr/local/etc/php/conf.d \ - --enable-zts \ - --enable-maintainer-zts \ - --enable-fpm \ - --enable-mbstring \ - --enable-pcntl \ - --enable-cgi \ - --enable-embed \ - --enable-dom \ - --enable-xml \ - --enable-simplexml \ - --enable-xmlreader \ - --enable-xmlwriter \ - --with-xsl \ - --with-extra-version="" \ - --with-curl \ - --with-mysqli \ - --with-openssl \ - --with-zlib \ - --with-zip \ - --disable-zend-signals \ -&& make -j"$(nproc)" \ -&& make install \ -&& strip /usr/local/bin/php /usr/local/sbin/php-fpm || true - -WORKDIR /usr/src -RUN git clone https://github.com/e-dant/watcher.git -WORKDIR /usr/src/watcher -RUN cmake -S . -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DCMAKE_INSTALL_PREFIX=/usr/local \ - -DBUILD_LIB=ON \ - -DBUILD_BIN=ON \ - -DBUILD_HDR=ON && \ - cmake --build build && \ - cmake --install build && \ - ldconfig /usr/local/lib /usr/local/lib64 - -FROM base AS final - -COPY --from=php-build /usr/local /usr/local - -RUN echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf && \ - echo "/usr/local/lib64" >> /etc/ld.so.conf.d/usr-local-lib.conf && \ - ldconfig - -# Create FrankenPHP directories (binary installed separately or not at all for devcontainer) -RUN mkdir -p /etc/frankenphp/caddy.d && \ - mkdir -p /etc/frankenphp/php.d && \ - mkdir -p /usr/lib/frankenphp/modules - -RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ - if [ -z "$EXTENSION_DIR" ]; then \ - echo "Error: Could not determine extension_dir"; \ - exit 1; \ - fi && \ - mkdir -p "$EXTENSION_DIR" && \ - echo "Created extension_dir: $EXTENSION_DIR" - -# Verify ZTS is enabled -RUN php -v | grep -q "ZTS" || (echo "ERROR: ZTS not enabled!" 
&& exit 1) && \ - php -m | grep -E 'curl|mysqli' >/dev/null - -ENV PATH="/usr/local/bin:${PATH}" \ - LD_LIBRARY_PATH="/usr/local/lib:/usr/local/lib64:${LD_LIBRARY_PATH:-}" - -RUN ln -sf /usr/local/bin/php /usr/bin/php && \ - ln -sf /usr/local/sbin/php-fpm /usr/sbin/php-fpm || true && \ - ln -sf /usr/local/bin/php-cgi /usr/bin/php-cgi - -RUN mkdir -p /etc/php-fpm.d && \ - mkdir -p /run/php-fpm && \ - mkdir -p /var/run && \ - mkdir -p /var/log/php-fpm && \ - mkdir -p /etc/httpd || true && \ - mkdir -p /usr/local/etc/php-fpm.d && \ - mkdir -p /usr/local/etc/php/conf.d && \ - ln -sf /usr/local/etc/php/conf.d /etc/php.d || true && \ - - echo "[global]" > /usr/local/etc/php-fpm.conf && \ - echo "pid = /run/php-fpm/php-fpm.pid" >> /usr/local/etc/php-fpm.conf && \ - echo "error_log = /var/log/php-fpm/error.log" >> /usr/local/etc/php-fpm.conf && \ - echo "daemonize = yes" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/usr/local/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ - echo "include=/etc/php-fpm.d/*.conf" >> /usr/local/etc/php-fpm.conf && \ - - echo "[www]" > /usr/local/etc/php-fpm.d/www.conf && \ - echo "user = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen = 127.0.0.1:9000" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.owner = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "listen.group = root" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm = dynamic" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_children = 5" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.start_servers = 2" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.min_spare_servers = 1" >> /usr/local/etc/php-fpm.d/www.conf && \ - echo "pm.max_spare_servers = 3" >> /usr/local/etc/php-fpm.d/www.conf && \ - - php-fpm -t -y /usr/local/etc/php-fpm.conf 2>&1 | grep -v "Nothing matches the include pattern" || true && \ - php-fpm -t -y /usr/local/etc/php-fpm.conf >/dev/null 2>&1 || \ - (PHP_MAJOR=$(php -r 'echo PHP_MAJOR_VERSION;') && \ - PHP_MINOR=$(php -r 'echo PHP_MINOR_VERSION;') && \ - echo "PHP-FPM config test failed for PHP ${PHP_MAJOR}.${PHP_MINOR}" && \ - echo "Config file contents:" && cat /usr/local/etc/php-fpm.conf && \ - echo "Pool config:" && cat /usr/local/etc/php-fpm.d/www.conf && \ - exit 1) && \ - - ln -sf /usr/local/etc/php-fpm.conf /etc/php-fpm.conf && \ - ln -sf /usr/local/etc/php-fpm.d/www.conf /etc/php-fpm.d/www.conf || true - -RUN echo "mysqli.default_socket = /var/lib/mysql/mysql.sock" > /usr/local/etc/php/conf.d/mysql-socket.ini - - -# Python deps used by test harness -RUN python3 -m pip install --no-cache-dir --upgrade pip && \ - python3 -m pip install --no-cache-dir flask requests psutil - -# Quality-of-life -WORKDIR /work -CMD ["bash"] diff --git a/.github/workflows/Dockerfile.centos-php-test-zts b/.github/workflows/Dockerfile.centos-php-test-zts index 09400989c..d65b1312f 100644 --- a/.github/workflows/Dockerfile.centos-php-test-zts +++ b/.github/workflows/Dockerfile.centos-php-test-zts @@ -150,8 +150,8 @@ RUN echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf && \ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ - cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - chmod +x /usr/local/bin/frankenphp && \ + cp /tmp/frankenphp-binary/frankenphp /usr/bin/frankenphp && \ + chmod +x /usr/bin/frankenphp && \ mkdir -p /etc/frankenphp/caddy.d && \ mkdir -p /etc/frankenphp/php.d && \ mkdir -p 
/usr/lib/frankenphp/modules; \ diff --git a/.github/workflows/Dockerfile.ubuntu-php-test-zts b/.github/workflows/Dockerfile.ubuntu-php-test-zts index 7864e4173..1d63d6b6d 100644 --- a/.github/workflows/Dockerfile.ubuntu-php-test-zts +++ b/.github/workflows/Dockerfile.ubuntu-php-test-zts @@ -143,12 +143,12 @@ RUN echo "/usr/local/lib" > /etc/ld.so.conf.d/usr-local-lib.conf && \ COPY frankenphp-binary/ /tmp/frankenphp-binary/ RUN if [ -f /tmp/frankenphp-binary/frankenphp ]; then \ - cp /tmp/frankenphp-binary/frankenphp /usr/local/bin/frankenphp && \ - chmod +x /usr/local/bin/frankenphp && \ - mkdir -p /etc/frankenphp/caddy.d && \ - mkdir -p /etc/frankenphp/php.d && \ - mkdir -p /usr/lib/frankenphp/modules; \ - fi + cp /tmp/frankenphp-binary/frankenphp /usr/bin/frankenphp && \ + chmod +x /usr/bin/frankenphp && \ + mkdir -p /etc/frankenphp/caddy.d && \ + mkdir -p /etc/frankenphp/php.d && \ + mkdir -p /usr/lib/frankenphp/modules; \ + fi RUN EXTENSION_DIR=$(php -i | grep "^extension_dir" | awk '{print $3}') && \ diff --git a/tools/server_tests/frankenphp_classic/main.py b/tools/server_tests/frankenphp_classic/main.py index 63c9550e0..8aaac05ba 100644 --- a/tools/server_tests/frankenphp_classic/main.py +++ b/tools/server_tests/frankenphp_classic/main.py @@ -62,16 +62,26 @@ def frankenphp_classic_pre_tests(tests_data): for test_data in tests_data: f.write("\n" + test_data["site_block"]) - subprocess.Popen( - [frankenphp_bin, 'run', '--config', caddyfile_path] - ) - time.sleep(20) - - result = subprocess.run(['pgrep', '-x', 'frankenphp'], capture_output=True, text=True) - if not result.stdout.strip(): - raise RuntimeError("FrankenPHP classic failed to start!") + print(f"Caddyfile prepared for {len(tests_data)} tests with {threads} threads") def frankenphp_classic_start_server(test_data, test_lib_dir, valgrind): + result = subprocess.run(['pgrep', '-x', 'frankenphp'], capture_output=True, text=True) + + if not result.stdout.strip(): + print("Starting FrankenPHP classic server...") + process = subprocess.Popen( + [frankenphp_bin, 'run', '--config', caddyfile_path] + ) + time.sleep(2) + + result = subprocess.run(['pgrep', '-x', 'frankenphp'], capture_output=True, text=True) + if not result.stdout.strip(): + raise RuntimeError("FrankenPHP classic failed to spawn!") + + print("FrankenPHP classic process started") + else: + print("FrankenPHP classic is already running") + return None def frankenphp_classic_uninit(): diff --git a/tools/server_tests/frankenphp_worker/main.py b/tools/server_tests/frankenphp_worker/main.py index 9d300c0b9..e310e9648 100644 --- a/tools/server_tests/frankenphp_worker/main.py +++ b/tools/server_tests/frankenphp_worker/main.py @@ -128,7 +128,7 @@ def frankenphp_worker_pre_tests(tests_data): total_workers = len(tests_data) threads = total_workers * 3 - + with open(caddyfile_path, 'w') as f: base_template = get_caddyfile_base_template() if base_template: @@ -136,18 +136,27 @@ def frankenphp_worker_pre_tests(tests_data): for test_data in tests_data: f.write("\n" + test_data["site_block"]) - process = subprocess.Popen( - [frankenphp_bin, 'run', '--config', caddyfile_path] - ) - time.sleep(20) - + print(f"Caddyfile prepared for {len(tests_data)} tests with {threads} threads") + return threads + +def frankenphp_worker_start_server(test_data, test_lib_dir, valgrind): result = subprocess.run(['pgrep', '-x', 'frankenphp'], capture_output=True, text=True) + if not result.stdout.strip(): - raise RuntimeError("FrankenPHP worker failed to start!") + print("Starting FrankenPHP worker 
server...") + process = subprocess.Popen( + [frankenphp_bin, 'run', '--config', caddyfile_path] + ) + time.sleep(2) + + result = subprocess.run(['pgrep', '-x', 'frankenphp'], capture_output=True, text=True) + if not result.stdout.strip(): + raise RuntimeError("FrankenPHP worker failed to spawn!") + + print("FrankenPHP worker process started") + else: + print("FrankenPHP worker is already running") - print(f"FrankenPHP worker started with {threads} threads for {len(tests_data)} tests") - -def frankenphp_worker_start_server(test_data, test_lib_dir, valgrind): return None def frankenphp_worker_uninit(): From d7adf452c8d15c7e27ad80f599c357d5ed5ccf79 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 23:52:47 +0000 Subject: [PATCH 127/170] Fix rate limiting race condition with atomic check-and-increment Changed getRateLimitingStatus to use exclusive lock and increment immediately, preventing concurrent threads from exceeding limits. Fix some tests. --- lib/agent/aikido_types/stats.go | 1 + lib/agent/grpc/request.go | 24 ++---- lib/agent/grpc/server.go | 1 - lib/agent/rate_limiting/rate_limiting.go | 11 ++- lib/php-extension/Environment.cpp | 21 ++++- tests/server/test_domains_limits/test.py | 4 +- tests/server/test_routes_limits/test.py | 4 +- tests/server/test_tor_monitored_ip/index.php | 2 + .../test_user_agent_monitored/index.php | 2 + tests/server/test_user_limits/test.py | 4 +- tests/testlib/testlib.py | 84 +++++++++++++++++++ tools/run_server_tests.py | 70 ++++++++++++++-- 12 files changed, 195 insertions(+), 33 deletions(-) diff --git a/lib/agent/aikido_types/stats.go b/lib/agent/aikido_types/stats.go index 9fd059ee1..2c9072671 100644 --- a/lib/agent/aikido_types/stats.go +++ b/lib/agent/aikido_types/stats.go @@ -48,6 +48,7 @@ type RateLimitingValue struct { UserCounts map[string]*SlidingWindow IpCounts map[string]*SlidingWindow RateLimitGroupCounts map[string]*SlidingWindow + Mutex sync.Mutex } type RateLimitingWildcardValue struct { diff --git a/lib/agent/grpc/request.go b/lib/agent/grpc/request.go index f659f1e8a..87bcb182f 100644 --- a/lib/agent/grpc/request.go +++ b/lib/agent/grpc/request.go @@ -176,20 +176,6 @@ func incrementSlidingWindowEntry(m map[string]*SlidingWindow, key string) *Slidi return entry } -func updateRateLimitingCounts(server *ServerData, method string, route string, routeParsed string, user string, ip string, rateLimitGroup string) { - server.RateLimitingMutex.Lock() - defer server.RateLimitingMutex.Unlock() - - rateLimitingDataForEndpoint := getRateLimitingDataForEndpoint(server, method, route, routeParsed) - if rateLimitingDataForEndpoint == nil { - return - } - - incrementSlidingWindowEntry(rateLimitingDataForEndpoint.UserCounts, user) - incrementSlidingWindowEntry(rateLimitingDataForEndpoint.IpCounts, ip) - incrementSlidingWindowEntry(rateLimitingDataForEndpoint.RateLimitGroupCounts, rateLimitGroup) -} - func isRateLimitingThresholdExceeded(config *RateLimitingConfig, countsMap map[string]*SlidingWindow, key string) bool { counts, exists := countsMap[key] if !exists { @@ -321,31 +307,37 @@ func getRateLimitingStatus(server *ServerData, method, route, routeParsed, user, } server.RateLimitingMutex.RLock() - defer server.RateLimitingMutex.RUnlock() - rateLimitingDataMatch := getRateLimitingDataForEndpoint(server, method, route, routeParsed) + server.RateLimitingMutex.RUnlock() + if rateLimitingDataMatch == nil { return &protos.RateLimitingStatus{Block: false} } + rateLimitingDataMatch.Mutex.Lock() + defer rateLimitingDataMatch.Mutex.Unlock() + if 
rateLimitGroup != "" { // If the rate limit group exists, we only try to rate limit by rate limit group if isRateLimitingThresholdExceeded(&rateLimitingDataMatch.Config, rateLimitingDataMatch.RateLimitGroupCounts, rateLimitGroup) { log.Infof(server.Logger, "Rate limited request for group %s - %s %s - %v", rateLimitGroup, method, routeParsed, rateLimitingDataMatch.RateLimitGroupCounts[rateLimitGroup]) return &protos.RateLimitingStatus{Block: true, Trigger: "group"} } + incrementSlidingWindowEntry(rateLimitingDataMatch.RateLimitGroupCounts, rateLimitGroup) } else if user != "" { // Otherwise, if the user exists, we try to rate limit by user if isRateLimitingThresholdExceeded(&rateLimitingDataMatch.Config, rateLimitingDataMatch.UserCounts, user) { log.Infof(server.Logger, "Rate limited request for user %s - %s %s - %v", user, method, routeParsed, rateLimitingDataMatch.UserCounts[user]) return &protos.RateLimitingStatus{Block: true, Trigger: "user"} } + incrementSlidingWindowEntry(rateLimitingDataMatch.UserCounts, user) } else { // Otherwise, we try to rate limit by ip if isRateLimitingThresholdExceeded(&rateLimitingDataMatch.Config, rateLimitingDataMatch.IpCounts, ip) { log.Infof(server.Logger, "Rate limited request for ip %s - %s %s - %v", ip, method, routeParsed, rateLimitingDataMatch.IpCounts[ip]) return &protos.RateLimitingStatus{Block: true, Trigger: "ip"} } + incrementSlidingWindowEntry(rateLimitingDataMatch.IpCounts, ip) } return &protos.RateLimitingStatus{Block: false} diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 983e75105..644fa472b 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -80,7 +80,6 @@ func (s *GrpcServer) OnRequestShutdown(ctx context.Context, req *protos.RequestM if req.GetShouldDiscoverRoute() || req.GetRateLimited() { go storeTotalStats(server, req.GetRateLimited()) go storeRoute(server, req.GetMethod(), req.GetRouteParsed(), req.GetApiSpec(), req.GetRateLimited()) - go updateRateLimitingCounts(server, req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()) } go updateAttackWaveCountsAndDetect(server, req.GetIsWebScanner(), req.GetIp(), req.GetUser(), req.GetUserAgent(), req.GetMethod(), req.GetUrl()) diff --git a/lib/agent/rate_limiting/rate_limiting.go b/lib/agent/rate_limiting/rate_limiting.go index a33ca3210..6a50dd682 100644 --- a/lib/agent/rate_limiting/rate_limiting.go +++ b/lib/agent/rate_limiting/rate_limiting.go @@ -6,13 +6,20 @@ import ( ) func AdvanceRateLimitingQueues(server *ServerData) { - server.RateLimitingMutex.Lock() - defer server.RateLimitingMutex.Unlock() + server.RateLimitingMutex.RLock() + endpoints := make([]*RateLimitingValue, 0, len(server.RateLimitingMap)) for _, endpoint := range server.RateLimitingMap { + endpoints = append(endpoints, endpoint) + } + server.RateLimitingMutex.RUnlock() + + for _, endpoint := range endpoints { + endpoint.Mutex.Lock() AdvanceSlidingWindowMap(endpoint.UserCounts, endpoint.Config.WindowSizeInMinutes) AdvanceSlidingWindowMap(endpoint.IpCounts, endpoint.Config.WindowSizeInMinutes) AdvanceSlidingWindowMap(endpoint.RateLimitGroupCounts, endpoint.Config.WindowSizeInMinutes) + endpoint.Mutex.Unlock() } } diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 47bbb9465..5ad3911fa 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -102,6 +102,14 @@ bool LoadLaravelEnvFile() { This function reads environment variables from $_SERVER for FrankenPHP 
compatibility. */ std::string GetFrankenEnvVariable(const std::string& env_key) { + if (AIKIDO_GLOBAL(sapi_name) != "frankenphp") { + return ""; + } + + // Force $_SERVER autoglobal to be initialized (it's lazily loaded in PHP) + // This is CRITICAL in ZTS mode to ensure each thread gets request-specific $_SERVER values + zend_is_auto_global_str(ZEND_STRL("_SERVER")); + if (Z_TYPE(PG(http_globals)[TRACK_VARS_SERVER]) != IS_ARRAY) { AIKIDO_LOG_DEBUG("franken_env[%s] = (empty - $_SERVER not an array)\n", env_key.c_str()); return ""; @@ -132,17 +140,22 @@ std::string GetLaravelEnvVariable(const std::string& env_key) { } /* - Load env variables from the following sources (in this order): + Load env variables from the following sources (priority order): - System environment variables - - PHP environment variables - - FrankenPHP environment variables + - FrankenPHP environment variables ($_SERVER - request-specific, thread-safe) + - PHP environment variables - Laravel environment variables + + Order is critical: In multithreaded environments (FrankenPHP worker/classic, ZTS), + getenv() returns cached process-level values that may belong to a different request. + $_SERVER must be checked first to get fresh, request-specific environment data. */ + using EnvGetterFn = std::string(*)(const std::string&); EnvGetterFn envGetters[] = { &GetSystemEnvVariable, - &GetPhpEnvVariable, &GetFrankenEnvVariable, + &GetPhpEnvVariable, &GetLaravelEnvVariable }; diff --git a/tests/server/test_domains_limits/test.py b/tests/server/test_domains_limits/test.py index 3c1c7cf57..db9f2b668 100755 --- a/tests/server/test_domains_limits/test.py +++ b/tests/server/test_domains_limits/test.py @@ -24,7 +24,9 @@ def run_test(): assert_events_length_is(events, 2) assert_started_event_is_valid(events[0]) - domains = [d["hostname"] for d in events[1]["hostnames"]] + + all_hostnames = aggregate_field_from_heartbeats("hostnames", unique_key="hostname") + domains = [d["hostname"] for d in all_hostnames] assert len(domains) == 2000, f"Expected 2000 domains, got {len(domains)}" assert generated_domains[0] + ".com" not in domains, f"Domain {generated_domains[0]} should not be in reported domains" assert generated_domains[-1] + ".com" in domains, f"Domain {generated_domains[-1]} should be in reported domains" diff --git a/tests/server/test_routes_limits/test.py b/tests/server/test_routes_limits/test.py index b14852760..354e9f8d5 100755 --- a/tests/server/test_routes_limits/test.py +++ b/tests/server/test_routes_limits/test.py @@ -28,7 +28,9 @@ def run_test(): assert_events_length_is(events, 2) assert_started_event_is_valid(events[0]) - paths = [p["path"] for p in events[1]["routes"]] + + all_routes = aggregate_field_from_heartbeats("routes", unique_key="path") + paths = [p["path"] for p in all_routes] assert len(paths) == 5000, f"Expected 5000 routes, got {len(paths)}" assert routes[0] not in paths, f"Route {routes[0]} should not be in reported paths" assert routes[-1] in paths, f"Route {routes[-1]} should be in reported paths" diff --git a/tests/server/test_tor_monitored_ip/index.php b/tests/server/test_tor_monitored_ip/index.php index b05fef240..64697f6bb 100755 --- a/tests/server/test_tor_monitored_ip/index.php +++ b/tests/server/test_tor_monitored_ip/index.php @@ -1,5 +1,7 @@ diff --git a/tests/server/test_user_agent_monitored/index.php b/tests/server/test_user_agent_monitored/index.php index b05fef240..64697f6bb 100755 --- a/tests/server/test_user_agent_monitored/index.php +++ b/tests/server/test_user_agent_monitored/index.php @@ 
-1,5 +1,7 @@ diff --git a/tests/server/test_user_limits/test.py b/tests/server/test_user_limits/test.py index 422668abf..cf5b40a85 100755 --- a/tests/server/test_user_limits/test.py +++ b/tests/server/test_user_limits/test.py @@ -24,7 +24,9 @@ def run_test(): assert_events_length_is(events, 2) assert_started_event_is_valid(events[0]) - users = [u["id"] for u in events[1]["users"]] + + all_users = aggregate_field_from_heartbeats("users", unique_key="id") + users = [u["id"] for u in all_users] assert len(users) == 2000, f"Expected 2000 users, got {len(users)}" assert generated_users[0] not in users, f"User {generated_users[0]} should not be in reported users" assert generated_users[-1] in users, f"User {generated_users[-1]} should be in reported users" diff --git a/tests/testlib/testlib.py b/tests/testlib/testlib.py index eedfa4e41..74fdb2a47 100644 --- a/tests/testlib/testlib.py +++ b/tests/testlib/testlib.py @@ -97,8 +97,76 @@ def apply_config(config_file): mock_server_set_config_file(config_file) time.sleep(120) +_all_heartbeats = [] + +def get_all_heartbeats(): + """ + Returns all heartbeats collected during the test. + Useful when you need to manually aggregate data across multiple heartbeats. + """ + global _all_heartbeats + return _all_heartbeats + +def aggregate_field_from_heartbeats(field_path, unique_key=None): + """ + Aggregates a field from all heartbeats. + + :param field_path: Dot-separated path to the field (e.g., "users", "hostnames", "routes") + :param unique_key: If provided, deduplicates by this key (e.g., "id" for users) + :return: List of aggregated items + """ + global _all_heartbeats + result = [] + seen_keys = set() + + for heartbeat in _all_heartbeats: + data = heartbeat + for key in field_path.split('.'): + data = data.get(key, []) + if not isinstance(data, (list, dict)): + break + + if isinstance(data, list): + for item in data: + if unique_key and isinstance(item, dict): + key_value = item.get(unique_key) + if key_value not in seen_keys: + seen_keys.add(key_value) + result.append(item) + elif not unique_key: + result.append(item) + + return result + def assert_events_length_is(events, length): + global _all_heartbeats assert isinstance(events, list), "Error: Events is not a list." + + started_events = [e for e in events if e.get('type') == 'started'] + heartbeats = [e for e in events if e.get('type') == 'heartbeat'] + other_events = [e for e in events if e.get('type') not in ['started', 'heartbeat']] + + _all_heartbeats = heartbeats.copy() + + filtered_started = [started_events[0]] if started_events else [] + + has_other_events = len(other_events) > 0 + + if heartbeats: + if not has_other_events and length == 2: + heartbeats_with_data = [h for h in heartbeats if h.get('stats', {}).get('requests', {}).get('total', 0) > 0] + if heartbeats_with_data: + filtered_heartbeat = [heartbeats_with_data[-1]] + else: + filtered_heartbeat = [heartbeats[-1]] + else: + filtered_heartbeat = [] + else: + filtered_heartbeat = [] + + events.clear() + events.extend(filtered_started + other_events + filtered_heartbeat) + assert len(events) == length, f"Error: Events list contains {len(events)} elements and not {length} elements. Events: {events}" subset_keys_contains_check = ["stack"] @@ -111,16 +179,32 @@ def assert_event_contains_subset(event_subset_key, event, event_subset, dry_mode Recursively checks that all keys and values in the subset JSON exist in the event JSON and have the same values. 
If a key in the subset is a list, all its elements must exist in the corresponding list in the event. + + When checking a heartbeat event, this function will try to match against ALL heartbeats + collected during the test, and pass if ANY one of them matches the expected subset. :param event: The event JSON dictionary :param subset: The subset JSON dictionary :raises AssertionError: If the subset is not fully contained within the event """ + global _all_heartbeats + def result(assertion_error): if dry_mode: return False raise assertion_error + if event_subset_key == "__root" and isinstance(event, dict) and event.get('type') == 'heartbeat' and isinstance(event_subset, dict) and event_subset.get('type') == 'heartbeat': + if len(_all_heartbeats) > 1: + print(f"Multiple heartbeats detected ({len(_all_heartbeats)}), checking subset against all heartbeats...") + for idx, heartbeat in enumerate(_all_heartbeats): + print(f"Trying heartbeat {idx + 1}/{len(_all_heartbeats)}...") + + if assert_event_contains_subset("__heartbeat_check", heartbeat, event_subset, dry_mode=True): + print(f"Match found in heartbeat {idx + 1}!") + return True + return result(AssertionError(f"Subset not found in any of the {len(_all_heartbeats)} heartbeats.")) + print(f"Searching {event_subset} in {event} (dry_mode = {dry_mode})...") if event is None: diff --git a/tools/run_server_tests.py b/tools/run_server_tests.py index 16bead5c8..9af8ff839 100755 --- a/tools/run_server_tests.py +++ b/tools/run_server_tests.py @@ -69,10 +69,24 @@ def is_port_in_active_use(port): result = sock.connect_ex(('127.0.0.1', port)) return result == 0 +def wait_for_port_ready(port, timeout=30, check_interval=0.1): + """Wait for a port to become ready to accept connections.""" + start_time = time.time() + + while time.time() - start_time < timeout: + if is_port_in_active_use(port): + return True + time.sleep(check_interval) + + return False + def generate_unique_port(): with lock: while True: port = random.randint(1024, 9999) + # Exclude port 2019 (Caddy admin endpoint) + if port == 2019: + continue if port not in used_ports and not is_port_in_active_use(port): used_ports.add(port) return port @@ -105,14 +119,26 @@ def _handle_test_scenario_impl(data, root_tests_dir, test_lib_dir, server, bench test_process = None try: print(f"Running {test_name}...") - print(f"Starting mock server on port {mock_port} with start_config.json for {test_name}...") - mock_aikido_core = subprocess.Popen(["python3", "-u", "mock_aikido_core.py", str(mock_port), data["config_path"]], cwd=os.path.dirname(os.path.abspath(__file__))) - time.sleep(5) + + # For frankenphp modes, mock servers are already started and FrankenPHP is already running + if server in ["frankenphp-worker", "frankenphp-classic"]: + mock_aikido_core = data.get("mock_process") + if not mock_aikido_core: + raise RuntimeError(f"Mock process not found for {test_name} in {server} mode") + else: + # For other server modes, start mock server normally + print(f"Starting mock server on port {mock_port} with start_config.json for {test_name}...") + mock_aikido_core = subprocess.Popen(["python3", "-u", "mock_aikido_core.py", str(mock_port), data["config_path"]], cwd=os.path.dirname(os.path.abspath(__file__))) + + # Wait for mock server to be ready (instead of fixed sleep) + if not wait_for_port_ready(mock_port, timeout=10): + raise RuntimeError(f"Mock server on port {mock_port} failed to start within 10 seconds") + print(f"Mock server on port {mock_port} is ready") - print(f"Starting {server} server on port 
{server_port} for {test_name}...") + print(f"Starting {server} server on port {server_port} for {test_name}...") - server_start = servers[server][START_SERVER] - server_process = server_start(data, test_lib_dir, valgrind) + server_start = servers[server][START_SERVER] + server_process = server_start(data, test_lib_dir, valgrind) time.sleep(20) @@ -148,7 +174,8 @@ def _handle_test_scenario_impl(data, root_tests_dir, test_lib_dir, server, bench failed_tests.append(test_name) finally: - if server_process: + # For frankenphp modes, don't stop the server (it's shared across all tests) + if server_process and server not in ["frankenphp-worker", "frankenphp-classic"]: server_process.terminate() server_process.wait() print(f"PHP server on port {server_port} stopped.") @@ -198,6 +225,35 @@ def main(root_tests_dir, test_lib_dir, test_dirs, server="php-built-in", benchma else: pre_tests() + # For frankenphp modes, start ALL mock servers BEFORE starting FrankenPHP + # since one FrankenPHP process handles all tests and initializes immediately + if server in ["frankenphp-worker", "frankenphp-classic"]: + print(f"Starting all mock servers for {server} mode...") + for test_data in tests_data: + test_name = test_data["test_name"] + mock_port = test_data["mock_port"] + print(f"Starting mock server on port {mock_port} for {test_name}...") + mock_process = subprocess.Popen( + ["python3", "-u", "mock_aikido_core.py", str(mock_port), test_data["config_path"]], + cwd=os.path.dirname(os.path.abspath(__file__)) + ) + test_data["mock_process"] = mock_process + + # Wait for ALL mock servers to be ready + print("Waiting for all mock servers to be ready...") + for test_data in tests_data: + mock_port = test_data["mock_port"] + test_name = test_data["test_name"] + if not wait_for_port_ready(mock_port, timeout=10): + raise RuntimeError(f"Mock server on port {mock_port} for {test_name} failed to start") + print(f"All {len(tests_data)} mock servers are ready!") + + # Now start FrankenPHP ONCE with all mock servers ready + server_start = servers[server][START_SERVER] + server_start(tests_data[0], test_lib_dir, valgrind) + print(f"FrankenPHP started with all mock servers ready") + time.sleep(5) # Give FrankenPHP time to initialize + threads = [] for test_data in tests_data: args = (test_data, root_tests_dir, test_lib_dir, server, benchmark, valgrind, debug) From cf01b3dbc708ce5c3883fd647220d26b57d624e8 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Sun, 14 Dec 2025 23:56:50 +0000 Subject: [PATCH 128/170] Fix rate limiting status function to ensure proper mutex unlocking --- lib/agent/grpc/request.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/agent/grpc/request.go b/lib/agent/grpc/request.go index 87bcb182f..98dea0c37 100644 --- a/lib/agent/grpc/request.go +++ b/lib/agent/grpc/request.go @@ -308,13 +308,14 @@ func getRateLimitingStatus(server *ServerData, method, route, routeParsed, user, server.RateLimitingMutex.RLock() rateLimitingDataMatch := getRateLimitingDataForEndpoint(server, method, route, routeParsed) - server.RateLimitingMutex.RUnlock() if rateLimitingDataMatch == nil { + server.RateLimitingMutex.RUnlock() return &protos.RateLimitingStatus{Block: false} } rateLimitingDataMatch.Mutex.Lock() + server.RateLimitingMutex.RUnlock() defer rateLimitingDataMatch.Mutex.Unlock() if rateLimitGroup != "" { From 31db3c3e8fa9ac0b0a40498361c89a9b903cb252 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 12:33:22 +0000 Subject: [PATCH 129/170] Refactor server tickers 
initialization to ensure they start on the first request using sync.Once. Update ServerData structure to track event sending and ensure tickers are created only when needed. Adjust related functions for attack wave detection and rate limiting to align with new initialization logic. --- lib/agent/aikido_types/init_data.go | 17 ++-- .../attackWaveDetector.go | 7 +- lib/agent/cloud/cloud.go | 26 +++++- lib/agent/cloud/common.go | 5 ++ lib/agent/cloud/event_started.go | 8 ++ lib/agent/grpc/server.go | 25 +++++- lib/agent/rate_limiting/rate_limiting.go | 9 +- lib/agent/server_utils/server.go | 3 - tests/server/test_domains_limits/test.py | 4 +- tests/server/test_routes_limits/test.py | 4 +- tests/server/test_user_limits/test.py | 4 +- tests/testlib/testlib.py | 84 ------------------- 12 files changed, 88 insertions(+), 108 deletions(-) diff --git a/lib/agent/aikido_types/init_data.go b/lib/agent/aikido_types/init_data.go index 98c374e26..8428decc0 100644 --- a/lib/agent/aikido_types/init_data.go +++ b/lib/agent/aikido_types/init_data.go @@ -1,7 +1,6 @@ package aikido_types import ( - "main/constants" "main/log" "sync" "time" @@ -130,13 +129,13 @@ type ServerDataPolling struct { func NewServerDataPolling() *ServerDataPolling { return &ServerDataPolling{ HeartbeatRoutineChannel: make(chan struct{}), - HeartbeatTicker: time.NewTicker(10 * time.Minute), + HeartbeatTicker: nil, // Will be created on first request ConfigPollingRoutineChannel: make(chan struct{}), - ConfigPollingTicker: time.NewTicker(1 * time.Minute), + ConfigPollingTicker: time.NewTicker(1 * time.Minute), // Start immediately for config updates RateLimitingChannel: make(chan struct{}), - RateLimitingTicker: time.NewTicker(constants.MinRateLimitingIntervalInMs * time.Millisecond), + RateLimitingTicker: nil, // Will be created on first request AttackWaveChannel: make(chan struct{}), - AttackWaveTicker: time.NewTicker(1 * time.Minute), + AttackWaveTicker: nil, // Will be created on first request } } @@ -215,6 +214,14 @@ type ServerData struct { // Got some request info passed via gRPC to the Agent GotTraffic uint32 + // Tracks if the "started" event has been sent for this server + // In multi-worker mode (e.g., frankenphp-worker), only one worker should send it + SentStartedEvent uint32 + + // Ensures tickers start exactly once on first request + // Using sync.Once is safe to call from any context (including gRPC handlers) + StartTickersOnce sync.Once + // Last time this server established a gRPC connection LastConnectionTime int64 diff --git a/lib/agent/attack-wave-detection/attackWaveDetector.go b/lib/agent/attack-wave-detection/attackWaveDetector.go index 857baa4d5..212e40e69 100644 --- a/lib/agent/attack-wave-detection/attackWaveDetector.go +++ b/lib/agent/attack-wave-detection/attackWaveDetector.go @@ -3,6 +3,7 @@ package attack_wave_detection import ( . 
"main/aikido_types" "main/utils" + "time" ) func AdvanceAttackWaveQueues(server *ServerData) { @@ -19,9 +20,11 @@ func AdvanceAttackWaveQueues(server *ServerData) { } } -func Init(server *ServerData) { +// StartAttackWaveTicker starts the attack wave detection ticker +// Called on first request via sync.Once +func StartAttackWaveTicker(server *ServerData) { + server.PollingData.AttackWaveTicker = time.NewTicker(1 * time.Minute) utils.StartPollingRoutine(server.PollingData.AttackWaveChannel, server.PollingData.AttackWaveTicker, AdvanceAttackWaveQueues, server) - AdvanceAttackWaveQueues(server) } func Uninit(server *ServerData) { diff --git a/lib/agent/cloud/cloud.go b/lib/agent/cloud/cloud.go index 140749192..8454e109e 100644 --- a/lib/agent/cloud/cloud.go +++ b/lib/agent/cloud/cloud.go @@ -2,7 +2,9 @@ package cloud import ( . "main/aikido_types" + "main/constants" "main/utils" + "time" ) func Init(server *ServerData) { @@ -11,10 +13,32 @@ func Init(server *ServerData) { CheckConfigUpdatedAt(server) - utils.StartPollingRoutine(server.PollingData.HeartbeatRoutineChannel, server.PollingData.HeartbeatTicker, SendHeartbeatEvent, server) + // Start config polling immediately (for cloud config updates) + // Heartbeat and other tickers will start on first request via StartAllTickers() utils.StartPollingRoutine(server.PollingData.ConfigPollingRoutineChannel, server.PollingData.ConfigPollingTicker, CheckConfigUpdatedAt, server) } +// StartAllTickers starts all tickers on first request +// Called via sync.Once to ensure exactly-once execution, safe from any context +func StartAllTickers(server *ServerData) { + // Determine initial heartbeat interval based on cloud config + // Default to 10 minutes (conservative) if config was never fetched + heartbeatInterval := 10 * time.Minute + + // Only use faster 1-minute interval if we successfully fetched config + // and cloud indicates this is a new server (ReceivedAnyStats = false) + if server.CloudConfig.ConfigUpdatedAt > 0 { + if !server.CloudConfig.ReceivedAnyStats { + heartbeatInterval = 1 * time.Minute + } else if server.CloudConfig.HeartbeatIntervalInMS >= constants.MinHeartbeatIntervalInMS { + heartbeatInterval = time.Duration(server.CloudConfig.HeartbeatIntervalInMS) * time.Millisecond + } + } + + server.PollingData.HeartbeatTicker = time.NewTicker(heartbeatInterval) + utils.StartPollingRoutine(server.PollingData.HeartbeatRoutineChannel, server.PollingData.HeartbeatTicker, SendHeartbeatEvent, server) +} + func Uninit(server *ServerData) { utils.StopPollingRoutine(server.PollingData.HeartbeatRoutineChannel) utils.StopPollingRoutine(server.PollingData.ConfigPollingRoutineChannel) diff --git a/lib/agent/cloud/common.go b/lib/agent/cloud/common.go index 0d6112dd9..e939364b1 100644 --- a/lib/agent/cloud/common.go +++ b/lib/agent/cloud/common.go @@ -35,6 +35,11 @@ func GetAgentInfo(server *ServerData) AgentInfo { } func ResetHeartbeatTicker(server *ServerData) { + // HeartbeatTicker is created on first request, so it may be nil during initial config fetch + if server.PollingData.HeartbeatTicker == nil { + return + } + if !server.CloudConfig.ReceivedAnyStats { log.Info(server.Logger, "Resetting HeartbeatTicker to 1m!") server.PollingData.HeartbeatTicker.Reset(1 * time.Minute) diff --git a/lib/agent/cloud/event_started.go b/lib/agent/cloud/event_started.go index 85e713352..f40c798f5 100644 --- a/lib/agent/cloud/event_started.go +++ b/lib/agent/cloud/event_started.go @@ -4,9 +4,17 @@ import ( . 
"main/aikido_types" "main/constants" "main/utils" + "sync/atomic" ) func SendStartEvent(server *ServerData) { + // In multi-worker mode (e.g., frankenphp-worker), ensure only one worker sends the started event + // Use atomic compare-and-swap to guarantee exactly-once semantics + if !atomic.CompareAndSwapUint32(&server.SentStartedEvent, 0, 1) { + // Another worker already sent the started event + return + } + startedEvent := Started{ Type: "started", Agent: GetAgentInfo(server), diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 644fa472b..b14e9f5c8 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -3,11 +3,13 @@ package grpc import ( "context" "fmt" + attack_wave_detection "main/attack-wave-detection" "main/cloud" "main/constants" "main/globals" "main/ipc/protos" "main/log" + rate_limiting "main/rate_limiting" "main/server_utils" "main/utils" "net" @@ -67,6 +69,17 @@ func (s *GrpcServer) GetRateLimitingStatus(ctx context.Context, req *protos.Rate if server == nil { return &protos.RateLimitingStatus{Block: false}, nil } + + // Start all tickers on first request (exactly once via sync.Once) + server.StartTickersOnce.Do(func() { + cloud.StartAllTickers(server) + rate_limiting.StartRateLimitingTicker(server) + attack_wave_detection.StartAttackWaveTicker(server) + }) + + // Mark that this server has received traffic + atomic.StoreUint32(&server.GotTraffic, 1) + log.Debugf(server.Logger, "Received rate limiting info: %s %s %s %s %s %s", req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()) return getRateLimitingStatus(server, req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()), nil } @@ -76,6 +89,17 @@ func (s *GrpcServer) OnRequestShutdown(ctx context.Context, req *protos.RequestM if server == nil { return &emptypb.Empty{}, nil } + + // Start all tickers on first request (exactly once via sync.Once) + server.StartTickersOnce.Do(func() { + cloud.StartAllTickers(server) + rate_limiting.StartRateLimitingTicker(server) + attack_wave_detection.StartAttackWaveTicker(server) + }) + + // Mark that this server has received traffic + atomic.StoreUint32(&server.GotTraffic, 1) + log.Debugf(server.Logger, "Received request metadata: %s %s %d %s %s %v", req.GetMethod(), req.GetRouteParsed(), req.GetStatusCode(), req.GetUser(), req.GetIp(), req.GetApiSpec()) if req.GetShouldDiscoverRoute() || req.GetRateLimited() { go storeTotalStats(server, req.GetRateLimited()) @@ -83,7 +107,6 @@ func (s *GrpcServer) OnRequestShutdown(ctx context.Context, req *protos.RequestM } go updateAttackWaveCountsAndDetect(server, req.GetIsWebScanner(), req.GetIp(), req.GetUser(), req.GetUserAgent(), req.GetMethod(), req.GetUrl()) - atomic.StoreUint32(&server.GotTraffic, 1) return &emptypb.Empty{}, nil } diff --git a/lib/agent/rate_limiting/rate_limiting.go b/lib/agent/rate_limiting/rate_limiting.go index 6a50dd682..35bdd2496 100644 --- a/lib/agent/rate_limiting/rate_limiting.go +++ b/lib/agent/rate_limiting/rate_limiting.go @@ -2,11 +2,12 @@ package rate_limiting import ( . 
"main/aikido_types" + "main/constants" "main/utils" + "time" ) func AdvanceRateLimitingQueues(server *ServerData) { - server.RateLimitingMutex.RLock() endpoints := make([]*RateLimitingValue, 0, len(server.RateLimitingMap)) for _, endpoint := range server.RateLimitingMap { @@ -23,9 +24,11 @@ func AdvanceRateLimitingQueues(server *ServerData) { } } -func Init(server *ServerData) { +// StartRateLimitingTicker starts the rate limiting ticker +// Called on first request via sync.Once +func StartRateLimitingTicker(server *ServerData) { + server.PollingData.RateLimitingTicker = time.NewTicker(constants.MinRateLimitingIntervalInMs * time.Millisecond) utils.StartPollingRoutine(server.PollingData.RateLimitingChannel, server.PollingData.RateLimitingTicker, AdvanceRateLimitingQueues, server) - AdvanceRateLimitingQueues(server) } func Uninit(server *ServerData) { diff --git a/lib/agent/server_utils/server.go b/lib/agent/server_utils/server.go index a767099d2..c61fbeabd 100644 --- a/lib/agent/server_utils/server.go +++ b/lib/agent/server_utils/server.go @@ -45,9 +45,6 @@ func Register(serverKey ServerKey, requestProcessorPID int32, req *protos.Config } else { cloud.SendStartEvent(server) } - - rate_limiting.Init(server) - attack_wave_detection.Init(server) } func Unregister(serverKey ServerKey) { diff --git a/tests/server/test_domains_limits/test.py b/tests/server/test_domains_limits/test.py index db9f2b668..3c1c7cf57 100755 --- a/tests/server/test_domains_limits/test.py +++ b/tests/server/test_domains_limits/test.py @@ -24,9 +24,7 @@ def run_test(): assert_events_length_is(events, 2) assert_started_event_is_valid(events[0]) - - all_hostnames = aggregate_field_from_heartbeats("hostnames", unique_key="hostname") - domains = [d["hostname"] for d in all_hostnames] + domains = [d["hostname"] for d in events[1]["hostnames"]] assert len(domains) == 2000, f"Expected 2000 domains, got {len(domains)}" assert generated_domains[0] + ".com" not in domains, f"Domain {generated_domains[0]} should not be in reported domains" assert generated_domains[-1] + ".com" in domains, f"Domain {generated_domains[-1]} should be in reported domains" diff --git a/tests/server/test_routes_limits/test.py b/tests/server/test_routes_limits/test.py index 354e9f8d5..b14852760 100755 --- a/tests/server/test_routes_limits/test.py +++ b/tests/server/test_routes_limits/test.py @@ -28,9 +28,7 @@ def run_test(): assert_events_length_is(events, 2) assert_started_event_is_valid(events[0]) - - all_routes = aggregate_field_from_heartbeats("routes", unique_key="path") - paths = [p["path"] for p in all_routes] + paths = [p["path"] for p in events[1]["routes"]] assert len(paths) == 5000, f"Expected 5000 routes, got {len(paths)}" assert routes[0] not in paths, f"Route {routes[0]} should not be in reported paths" assert routes[-1] in paths, f"Route {routes[-1]} should be in reported paths" diff --git a/tests/server/test_user_limits/test.py b/tests/server/test_user_limits/test.py index cf5b40a85..422668abf 100755 --- a/tests/server/test_user_limits/test.py +++ b/tests/server/test_user_limits/test.py @@ -24,9 +24,7 @@ def run_test(): assert_events_length_is(events, 2) assert_started_event_is_valid(events[0]) - - all_users = aggregate_field_from_heartbeats("users", unique_key="id") - users = [u["id"] for u in all_users] + users = [u["id"] for u in events[1]["users"]] assert len(users) == 2000, f"Expected 2000 users, got {len(users)}" assert generated_users[0] not in users, f"User {generated_users[0]} should not be in reported users" assert 
generated_users[-1] in users, f"User {generated_users[-1]} should be in reported users" diff --git a/tests/testlib/testlib.py b/tests/testlib/testlib.py index 74fdb2a47..eedfa4e41 100644 --- a/tests/testlib/testlib.py +++ b/tests/testlib/testlib.py @@ -97,76 +97,8 @@ def apply_config(config_file): mock_server_set_config_file(config_file) time.sleep(120) -_all_heartbeats = [] - -def get_all_heartbeats(): - """ - Returns all heartbeats collected during the test. - Useful when you need to manually aggregate data across multiple heartbeats. - """ - global _all_heartbeats - return _all_heartbeats - -def aggregate_field_from_heartbeats(field_path, unique_key=None): - """ - Aggregates a field from all heartbeats. - - :param field_path: Dot-separated path to the field (e.g., "users", "hostnames", "routes") - :param unique_key: If provided, deduplicates by this key (e.g., "id" for users) - :return: List of aggregated items - """ - global _all_heartbeats - result = [] - seen_keys = set() - - for heartbeat in _all_heartbeats: - data = heartbeat - for key in field_path.split('.'): - data = data.get(key, []) - if not isinstance(data, (list, dict)): - break - - if isinstance(data, list): - for item in data: - if unique_key and isinstance(item, dict): - key_value = item.get(unique_key) - if key_value not in seen_keys: - seen_keys.add(key_value) - result.append(item) - elif not unique_key: - result.append(item) - - return result - def assert_events_length_is(events, length): - global _all_heartbeats assert isinstance(events, list), "Error: Events is not a list." - - started_events = [e for e in events if e.get('type') == 'started'] - heartbeats = [e for e in events if e.get('type') == 'heartbeat'] - other_events = [e for e in events if e.get('type') not in ['started', 'heartbeat']] - - _all_heartbeats = heartbeats.copy() - - filtered_started = [started_events[0]] if started_events else [] - - has_other_events = len(other_events) > 0 - - if heartbeats: - if not has_other_events and length == 2: - heartbeats_with_data = [h for h in heartbeats if h.get('stats', {}).get('requests', {}).get('total', 0) > 0] - if heartbeats_with_data: - filtered_heartbeat = [heartbeats_with_data[-1]] - else: - filtered_heartbeat = [heartbeats[-1]] - else: - filtered_heartbeat = [] - else: - filtered_heartbeat = [] - - events.clear() - events.extend(filtered_started + other_events + filtered_heartbeat) - assert len(events) == length, f"Error: Events list contains {len(events)} elements and not {length} elements. Events: {events}" subset_keys_contains_check = ["stack"] @@ -179,32 +111,16 @@ def assert_event_contains_subset(event_subset_key, event, event_subset, dry_mode Recursively checks that all keys and values in the subset JSON exist in the event JSON and have the same values. If a key in the subset is a list, all its elements must exist in the corresponding list in the event. - - When checking a heartbeat event, this function will try to match against ALL heartbeats - collected during the test, and pass if ANY one of them matches the expected subset. 
:param event: The event JSON dictionary :param subset: The subset JSON dictionary :raises AssertionError: If the subset is not fully contained within the event """ - global _all_heartbeats - def result(assertion_error): if dry_mode: return False raise assertion_error - if event_subset_key == "__root" and isinstance(event, dict) and event.get('type') == 'heartbeat' and isinstance(event_subset, dict) and event_subset.get('type') == 'heartbeat': - if len(_all_heartbeats) > 1: - print(f"Multiple heartbeats detected ({len(_all_heartbeats)}), checking subset against all heartbeats...") - for idx, heartbeat in enumerate(_all_heartbeats): - print(f"Trying heartbeat {idx + 1}/{len(_all_heartbeats)}...") - - if assert_event_contains_subset("__heartbeat_check", heartbeat, event_subset, dry_mode=True): - print(f"Match found in heartbeat {idx + 1}!") - return True - return result(AssertionError(f"Subset not found in any of the {len(_all_heartbeats)} heartbeats.")) - print(f"Searching {event_subset} in {event} (dry_mode = {dry_mode})...") if event is None: From 7c3ba4aa749c6ecc808267896072e1b992075f26 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 12:43:54 +0000 Subject: [PATCH 130/170] fix merge conflicts --- lib/php-extension/HandleBypassedIp.cpp | 4 +- lib/request-processor/aikido_types/handle.go | 1 + .../handle_blocking_request.go | 6 +-- .../handle_request_metadata.go | 48 +++++++++---------- 4 files changed, 30 insertions(+), 29 deletions(-) diff --git a/lib/php-extension/HandleBypassedIp.cpp b/lib/php-extension/HandleBypassedIp.cpp index 90906055e..2842db13b 100644 --- a/lib/php-extension/HandleBypassedIp.cpp +++ b/lib/php-extension/HandleBypassedIp.cpp @@ -12,8 +12,8 @@ void InitIpBypassCheck() { try { std::string output; - requestProcessor.SendEvent(EVENT_GET_IS_IP_BYPASSED, output); - action.Execute(output); + AIKIDO_GLOBAL(requestProcessor).SendEvent(EVENT_GET_IS_IP_BYPASSED, output); + AIKIDO_GLOBAL(action).Execute(output); } catch (const std::exception &e) { AIKIDO_LOG_ERROR("Exception encountered in processing IP bypass check event: %s\n", e.what()); } diff --git a/lib/request-processor/aikido_types/handle.go b/lib/request-processor/aikido_types/handle.go index 61adc784d..b3b17d9dc 100644 --- a/lib/request-processor/aikido_types/handle.go +++ b/lib/request-processor/aikido_types/handle.go @@ -24,4 +24,5 @@ type RequestShutdownParams struct { QueryParsed map[string]interface{} IsWebScanner bool ShouldDiscoverRoute bool + IsIpBypassed bool } diff --git a/lib/request-processor/handle_blocking_request.go b/lib/request-processor/handle_blocking_request.go index c2b198f30..9b170ba47 100644 --- a/lib/request-processor/handle_blocking_request.go +++ b/lib/request-processor/handle_blocking_request.go @@ -150,9 +150,9 @@ func GetBypassAction() string { return string(actionJson) } -func OnGetIsIpBypassed() string { - log.Debugf("OnGetIsIpBypassed called!") - if context.IsIpBypassed() { +func OnGetIsIpBypassed(inst *instance.RequestProcessorInstance) string { + log.Debugf(inst, "OnGetIsIpBypassed called!") + if context.IsIpBypassed(inst) { return GetBypassAction() } return "" diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index 051ca36be..1e7ddd8af 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -39,31 +39,31 @@ func OnPostRequest(inst *instance.RequestProcessorInstance) string { if inst.GetCurrentServer() == nil { return "" } - if 
!context.IsIpBypassed() { - params := RequestShutdownParams{ - ThreadID: inst.GetThreadID(), - Token: inst.GetCurrentToken(), - Method: context.GetMethod(inst), - Route: context.GetRoute(inst), - RouteParsed: context.GetParsedRoute(inst), - StatusCode: context.GetStatusCode(inst), - User: context.GetUserId(inst), - UserAgent: context.GetUserAgent(inst), - IP: context.GetIp(inst), - Url: context.GetUrl(inst), - RateLimitGroup: context.GetRateLimitGroup(inst), - RateLimited: context.IsEndpointRateLimited(inst), - QueryParsed: context.GetQueryParsed(inst), - IsIpBypassed: context.IsIpBypassed(inst), - APISpec: api_discovery.GetApiInfo(inst, inst.GetCurrentServer()), - } + if !context.IsIpBypassed(inst) { + params := RequestShutdownParams{ + ThreadID: inst.GetThreadID(), + Token: inst.GetCurrentToken(), + Method: context.GetMethod(inst), + Route: context.GetRoute(inst), + RouteParsed: context.GetParsedRoute(inst), + StatusCode: context.GetStatusCode(inst), + User: context.GetUserId(inst), + UserAgent: context.GetUserAgent(inst), + IP: context.GetIp(inst), + Url: context.GetUrl(inst), + RateLimitGroup: context.GetRateLimitGroup(inst), + RateLimited: context.IsEndpointRateLimited(inst), + QueryParsed: context.GetQueryParsed(inst), + IsIpBypassed: context.IsIpBypassed(inst), + APISpec: api_discovery.GetApiInfo(inst, inst.GetCurrentServer()), + } - context.Clear(inst) - - go func() { - OnRequestShutdownReporting(params) - }() - } + context.Clear(inst) + + go func() { + OnRequestShutdownReporting(params) + }() + } return "" } From 213ed41877f3735e53d001aecf43fec06cbdf36a Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 13:07:45 +0000 Subject: [PATCH 131/170] Refactor IP bypass handling to use global accessors. Update initialization in PhpLifecycle to reset bypass state. --- lib/php-extension/Action.cpp | 2 +- lib/php-extension/HandleBypassedIp.cpp | 25 +++++++++++++++++-------- lib/php-extension/PhpLifecycle.cpp | 3 ++- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/lib/php-extension/Action.cpp b/lib/php-extension/Action.cpp index 4125038fd..997d5b296 100644 --- a/lib/php-extension/Action.cpp +++ b/lib/php-extension/Action.cpp @@ -38,7 +38,7 @@ ACTION_STATUS Action::executeStore(json &event) { } ACTION_STATUS Action::executeBypassIp(json &event) { - isIpBypassed = true; + AIKIDO_GLOBAL(isIpBypassed) = true; return CONTINUE; } diff --git a/lib/php-extension/HandleBypassedIp.cpp b/lib/php-extension/HandleBypassedIp.cpp index 2842db13b..b4e26c155 100644 --- a/lib/php-extension/HandleBypassedIp.cpp +++ b/lib/php-extension/HandleBypassedIp.cpp @@ -1,13 +1,14 @@ #include "Includes.h" -// This variable is used to check if the request is bypassed, -// if true, all blocking checks will be skipped. -bool isIpBypassed = false; +// The isIpBypassed module global variable is used to store whether the current IP is bypassed. +// If true, all blocking checks will be skipped. +// Accessed via AIKIDO_GLOBAL(isIpBypassed). -void InitIpBypassCheck() { - // Reset state for new request - isIpBypassed = false; +// The checkedIpBypass module global variable is used to check if IP bypass check +// has already been called, in order to avoid multiple calls to this function. +// Accessed via AIKIDO_GLOBAL(checkedIpBypass). 
+void InitIpBypassCheck() { ScopedTimer scopedTimer("check_ip_bypass", "aikido_op"); try { @@ -19,8 +20,16 @@ void InitIpBypassCheck() { } } - bool IsAikidoDisabledOrBypassed() { - return AIKIDO_GLOBAL(disable) == true || isIpBypassed; + if (AIKIDO_GLOBAL(disable) == true) { + return true; + } + + if (!AIKIDO_GLOBAL(checkedIpBypass)) { + AIKIDO_GLOBAL(checkedIpBypass) = true; + InitIpBypassCheck(); + } + + return AIKIDO_GLOBAL(isIpBypassed); } diff --git a/lib/php-extension/PhpLifecycle.cpp b/lib/php-extension/PhpLifecycle.cpp index 5972eb30e..d579d5977 100644 --- a/lib/php-extension/PhpLifecycle.cpp +++ b/lib/php-extension/PhpLifecycle.cpp @@ -16,7 +16,8 @@ void PhpLifecycle::RequestInit() { AIKIDO_GLOBAL(requestProcessor).RequestInit(); AIKIDO_GLOBAL(checkedAutoBlock) = false; AIKIDO_GLOBAL(checkedShouldBlockRequest) = false; - InitIpBypassCheck(); + AIKIDO_GLOBAL(checkedIpBypass) = false; + AIKIDO_GLOBAL(isIpBypassed) = false; } void PhpLifecycle::RequestShutdown() { From 004d110175c60addd98a32c7e51e24a4e05f833c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 13:11:33 +0000 Subject: [PATCH 132/170] + --- lib/php-extension/include/HandleBypassedIp.h | 10 +--------- lib/php-extension/include/php_aikido.h | 2 ++ 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/lib/php-extension/include/HandleBypassedIp.h b/lib/php-extension/include/HandleBypassedIp.h index e8d86ee8f..e0d6f1b69 100644 --- a/lib/php-extension/include/HandleBypassedIp.h +++ b/lib/php-extension/include/HandleBypassedIp.h @@ -1,13 +1,5 @@ #pragma once -// This variable is used to check if the request is bypassed, -// if true, all blocking checks will be skipped. -extern bool isIpBypassed; - -// Initialize the IP bypass check at request start. -// Resets state and checks if the current IP should be bypassed. -// This should be called during request initialization. -void InitIpBypassCheck(); - // Check if Aikido is disabled or the current IP is bypassed. +// The IP bypass check is performed lazily on first call. bool IsAikidoDisabledOrBypassed(); diff --git a/lib/php-extension/include/php_aikido.h b/lib/php-extension/include/php_aikido.h index 4c90b34ec..bb19eac26 100644 --- a/lib/php-extension/include/php_aikido.h +++ b/lib/php-extension/include/php_aikido.h @@ -41,6 +41,8 @@ uint64_t totalOverheadForCurrentRequest; bool laravelEnvLoaded; bool checkedAutoBlock; bool checkedShouldBlockRequest; +bool checkedIpBypass; +bool isIpBypassed; HashTable *global_ast_to_clean; void (*original_ast_process)(zend_ast *ast); // IMPORTANT: The order of these objects MUST NOT be changed due to object interdependencies. 
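For reference, a minimal standalone C++ sketch of the lazy, cached bypass check that the two patches above converge on. State lives in per-thread globals (modeled here with thread_local; the extension itself goes through the AIKIDO_GLOBAL accessors), both flags are reset at request init, and the round-trip to the request processor runs at most once per request. QueryBypassFromRequestProcessor is an illustrative stand-in, not the extension's real API.

    #include <iostream>

    struct AikidoRequestGlobals {
        bool checkedIpBypass = false;  // has the bypass check already run this request?
        bool isIpBypassed = false;     // cached result of that check
    };

    // Stand-in for the ZTS module globals reached through AIKIDO_GLOBAL(...).
    thread_local AikidoRequestGlobals g;

    // Illustrative stand-in for the EVENT_GET_IS_IP_BYPASSED round-trip;
    // the real extension goes through SendEvent() and Action::Execute().
    static bool QueryBypassFromRequestProcessor() { return false; }

    // Mirrors PhpLifecycle::RequestInit(): both flags are cleared per request.
    void RequestInit() {
        g.checkedIpBypass = false;
        g.isIpBypassed = false;
    }

    // Lazy check: the expensive lookup happens once, on first use in a request.
    bool IsIpBypassed() {
        if (!g.checkedIpBypass) {
            g.checkedIpBypass = true;
            g.isIpBypassed = QueryBypassFromRequestProcessor();
        }
        return g.isIpBypassed;
    }

    int main() {
        RequestInit();
        std::cout << std::boolalpha << IsIpBypassed() << '\n';  // prints "false" in this sketch
        return 0;
    }
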
From 7b2e79ac888b8d4539ed7cb7f7d51879cd2e8736 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 13:36:23 +0000 Subject: [PATCH 133/170] Add request initialization check in IP bypass handling --- lib/php-extension/HandleBypassedIp.cpp | 4 ++++ lib/php-extension/include/RequestProcessor.h | 1 + 2 files changed, 5 insertions(+) diff --git a/lib/php-extension/HandleBypassedIp.cpp b/lib/php-extension/HandleBypassedIp.cpp index b4e26c155..19fbc0a9f 100644 --- a/lib/php-extension/HandleBypassedIp.cpp +++ b/lib/php-extension/HandleBypassedIp.cpp @@ -25,6 +25,10 @@ bool IsAikidoDisabledOrBypassed() { return true; } + if (!AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { + return false; + } + if (!AIKIDO_GLOBAL(checkedIpBypass)) { AIKIDO_GLOBAL(checkedIpBypass) = true; InitIpBypassCheck(); diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h index 2fe0adfa6..bea392581 100644 --- a/lib/php-extension/include/RequestProcessor.h +++ b/lib/php-extension/include/RequestProcessor.h @@ -44,6 +44,7 @@ class RequestProcessor { bool RequestInit(); bool SendEvent(EVENT_ID eventId, std::string& output); bool IsBlockingEnabled(); + bool IsRequestInitialized() const { return requestInitialized; } bool ReportStats(); void LoadConfig(const std::string& previousToken, const std::string& currentToken); void LoadConfigFromEnvironment(); From dcebcfdc696e3a875847d776502c34a249130c46 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 13:45:51 +0000 Subject: [PATCH 134/170] Initialize IP bypass state in global structure and update bypass check logic --- lib/php-extension/Aikido.cpp | 2 ++ lib/php-extension/HandleBypassedIp.cpp | 4 ---- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 868ce48f3..756049537 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -113,6 +113,8 @@ PHP_GINIT_FUNCTION(aikido) { aikido_globals->laravelEnvLoaded = false; aikido_globals->checkedAutoBlock = false; aikido_globals->checkedShouldBlockRequest = false; + aikido_globals->checkedIpBypass = false; + aikido_globals->isIpBypassed = false; aikido_globals->global_ast_to_clean = nullptr; aikido_globals->original_ast_process = nullptr; #ifdef ZTS diff --git a/lib/php-extension/HandleBypassedIp.cpp b/lib/php-extension/HandleBypassedIp.cpp index 19fbc0a9f..b4e26c155 100644 --- a/lib/php-extension/HandleBypassedIp.cpp +++ b/lib/php-extension/HandleBypassedIp.cpp @@ -25,10 +25,6 @@ bool IsAikidoDisabledOrBypassed() { return true; } - if (!AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { - return false; - } - if (!AIKIDO_GLOBAL(checkedIpBypass)) { AIKIDO_GLOBAL(checkedIpBypass) = true; InitIpBypassCheck(); From 529927d094f747f754a7cc8a4571c4ccabae444c Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 13:46:34 +0000 Subject: [PATCH 135/170] Remove inline method IsRequestInitialized from RequestProcessor class --- lib/php-extension/include/RequestProcessor.h | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h index bea392581..2fe0adfa6 100644 --- a/lib/php-extension/include/RequestProcessor.h +++ b/lib/php-extension/include/RequestProcessor.h @@ -44,7 +44,6 @@ class RequestProcessor { bool RequestInit(); bool SendEvent(EVENT_ID eventId, std::string& output); bool IsBlockingEnabled(); - bool IsRequestInitialized() const { return 
requestInitialized; } bool ReportStats(); void LoadConfig(const std::string& previousToken, const std::string& currentToken); void LoadConfigFromEnvironment(); From d58dc9641694d29288880245635149878e80457f Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 14:47:04 +0000 Subject: [PATCH 136/170] Add EnsureTickersStarted RPC and refactor ticker initialization logic --- lib/agent/grpc/server.go | 41 ++++++++++--------- lib/ipc.proto | 6 +++ lib/request-processor/aikido_types/handle.go | 1 + lib/request-processor/grpc/client.go | 18 ++++++++ .../handle_request_metadata.go | 9 +++- 5 files changed, 54 insertions(+), 21 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index b14e9f5c8..7c5f00896 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -70,16 +70,6 @@ func (s *GrpcServer) GetRateLimitingStatus(ctx context.Context, req *protos.Rate return &protos.RateLimitingStatus{Block: false}, nil } - // Start all tickers on first request (exactly once via sync.Once) - server.StartTickersOnce.Do(func() { - cloud.StartAllTickers(server) - rate_limiting.StartRateLimitingTicker(server) - attack_wave_detection.StartAttackWaveTicker(server) - }) - - // Mark that this server has received traffic - atomic.StoreUint32(&server.GotTraffic, 1) - log.Debugf(server.Logger, "Received rate limiting info: %s %s %s %s %s %s", req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()) return getRateLimitingStatus(server, req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()), nil } @@ -90,16 +80,6 @@ func (s *GrpcServer) OnRequestShutdown(ctx context.Context, req *protos.RequestM return &emptypb.Empty{}, nil } - // Start all tickers on first request (exactly once via sync.Once) - server.StartTickersOnce.Do(func() { - cloud.StartAllTickers(server) - rate_limiting.StartRateLimitingTicker(server) - attack_wave_detection.StartAttackWaveTicker(server) - }) - - // Mark that this server has received traffic - atomic.StoreUint32(&server.GotTraffic, 1) - log.Debugf(server.Logger, "Received request metadata: %s %s %d %s %s %v", req.GetMethod(), req.GetRouteParsed(), req.GetStatusCode(), req.GetUser(), req.GetIp(), req.GetApiSpec()) if req.GetShouldDiscoverRoute() || req.GetRateLimited() { go storeTotalStats(server, req.GetRateLimited()) @@ -193,6 +173,27 @@ func (s *GrpcServer) OnMonitoredUserAgentMatch(ctx context.Context, req *protos. 
return &emptypb.Empty{}, nil } +func (s *GrpcServer) EnsureTickersStarted(ctx context.Context, req *protos.ServerIdentifier) (*emptypb.Empty, error) { + server := globals.GetServer(ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()}) + if server == nil { + return &emptypb.Empty{}, nil + } + + // Start all tickers on first request (exactly once via sync.Once) + // This is called explicitly from the request processor on the first request + server.StartTickersOnce.Do(func() { + log.Debugf(server.Logger, "Starting all tickers for server \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(req.GetToken())) + cloud.StartAllTickers(server) + rate_limiting.StartRateLimitingTicker(server) + attack_wave_detection.StartAttackWaveTicker(server) + }) + + // Mark that this server has received traffic + atomic.StoreUint32(&server.GotTraffic, 1) + + return &emptypb.Empty{}, nil +} + var grpcServer *grpc.Server func StartServer(lis net.Listener) { diff --git a/lib/ipc.proto b/lib/ipc.proto index 0155712d0..f8f98e8d6 100644 --- a/lib/ipc.proto +++ b/lib/ipc.proto @@ -19,6 +19,7 @@ service Aikido { rpc OnMiddlewareInstalled(MiddlewareInstalledInfo) returns (google.protobuf.Empty); rpc OnMonitoredIpMatch(MonitoredIpMatch) returns (google.protobuf.Empty); rpc OnMonitoredUserAgentMatch(MonitoredUserAgentMatch) returns (google.protobuf.Empty); + rpc EnsureTickersStarted(ServerIdentifier) returns (google.protobuf.Empty); } message Config { @@ -231,4 +232,9 @@ message MonitoredUserAgentMatch { string token = 1; int32 server_pid = 2; repeated string lists = 3; +} + +message ServerIdentifier { + string token = 1; + int32 server_pid = 2; } \ No newline at end of file diff --git a/lib/request-processor/aikido_types/handle.go b/lib/request-processor/aikido_types/handle.go index b3b17d9dc..7b62ff3cb 100644 --- a/lib/request-processor/aikido_types/handle.go +++ b/lib/request-processor/aikido_types/handle.go @@ -25,4 +25,5 @@ type RequestShutdownParams struct { IsWebScanner bool ShouldDiscoverRoute bool IsIpBypassed bool + Server *ServerData } diff --git a/lib/request-processor/grpc/client.go b/lib/request-processor/grpc/client.go index c5a833c86..072fc136b 100644 --- a/lib/request-processor/grpc/client.go +++ b/lib/request-processor/grpc/client.go @@ -112,6 +112,24 @@ func OnPackages(server *ServerData, packages map[string]string) { log.Debugf(nil, "Packages sent via socket!") } +/* Ensure tickers are started for the server (called on first request) */ +func EnsureTickersStarted(server *ServerData) { + if client == nil { + return + } + + ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) + defer cancel() + + _, err := client.EnsureTickersStarted(ctx, &protos.ServerIdentifier{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID}) + if err != nil { + log.Warnf(nil, "Could not ensure tickers started: %v", err) + return + } + + log.Debugf(nil, "Tickers initialization requested via socket!") +} + /* Send request metadata (route & method) to Aikido Agent via gRPC */ func GetRateLimitingStatus(inst *instance.RequestProcessorInstance, server *ServerData, method string, route string, routeParsed string, user string, ip string, rateLimitGroup string, timeout time.Duration) *protos.RateLimitingStatus { if client == nil || server == nil { diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index 1e7ddd8af..212b2a47e 100644 --- a/lib/request-processor/handle_request_metadata.go +++ 
b/lib/request-processor/handle_request_metadata.go @@ -21,6 +21,10 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { return } + // Ensure tickers are started on first request from a non-bypassed IP + // This ensures heartbeats and other background operations start when we have actual traffic to monitor + grpc.EnsureTickersStarted(params.Server) + log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) // Only detect web scanner activity for non-bypassed IPs if !params.IsIpBypassed { @@ -36,9 +40,11 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { } func OnPostRequest(inst *instance.RequestProcessorInstance) string { - if inst.GetCurrentServer() == nil { + server := inst.GetCurrentServer() + if server == nil { return "" } + if !context.IsIpBypassed(inst) { params := RequestShutdownParams{ ThreadID: inst.GetThreadID(), @@ -56,6 +62,7 @@ func OnPostRequest(inst *instance.RequestProcessorInstance) string { QueryParsed: context.GetQueryParsed(inst), IsIpBypassed: context.IsIpBypassed(inst), APISpec: api_discovery.GetApiInfo(inst, inst.GetCurrentServer()), + Server: server, } context.Clear(inst) From f7201b666f69aa95f3dcc0918ef8ee22f933dc9d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 14:48:59 +0000 Subject: [PATCH 137/170] Ensure tickers are started for rate limiting in blocking status check --- lib/request-processor/handle_blocking_request.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/request-processor/handle_blocking_request.go b/lib/request-processor/handle_blocking_request.go index 9b170ba47..224d451f7 100644 --- a/lib/request-processor/handle_blocking_request.go +++ b/lib/request-processor/handle_blocking_request.go @@ -55,6 +55,8 @@ func OnGetBlockingStatus(inst *instance.RequestProcessorInstance) string { } if context.IsEndpointRateLimitingEnabled(inst) { + grpc.EnsureTickersStarted(server) + // If request is monitored for rate limiting, // do a sync call via gRPC to see if the request should be blocked or not method := context.GetMethod(inst) From 6d40bc366852d2ba1aecbeabce5fdcaaefd65b2b Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 14:55:20 +0000 Subject: [PATCH 138/170] Implement ticker initialization for rate limiting in gRPC server and remove redundant ticker start call in blocking request handling --- lib/agent/grpc/server.go | 7 +++++++ lib/request-processor/handle_blocking_request.go | 2 -- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 7c5f00896..820617b0c 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -70,6 +70,13 @@ func (s *GrpcServer) GetRateLimitingStatus(ctx context.Context, req *protos.Rate return &protos.RateLimitingStatus{Block: false}, nil } + server.StartTickersOnce.Do(func() { + log.Debugf(server.Logger, "Starting all tickers for server \"AIK_RUNTIME_***%s\" (via rate limiting)", utils.AnonymizeToken(req.GetToken())) + cloud.StartAllTickers(server) + rate_limiting.StartRateLimitingTicker(server) + attack_wave_detection.StartAttackWaveTicker(server) + }) + log.Debugf(server.Logger, "Received rate limiting info: %s %s %s %s %s %s", req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()) return getRateLimitingStatus(server, req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()), nil } diff --git 
a/lib/request-processor/handle_blocking_request.go b/lib/request-processor/handle_blocking_request.go index 224d451f7..9b170ba47 100644 --- a/lib/request-processor/handle_blocking_request.go +++ b/lib/request-processor/handle_blocking_request.go @@ -55,8 +55,6 @@ func OnGetBlockingStatus(inst *instance.RequestProcessorInstance) string { } if context.IsEndpointRateLimitingEnabled(inst) { - grpc.EnsureTickersStarted(server) - // If request is monitored for rate limiting, // do a sync call via gRPC to see if the request should be blocked or not method := context.GetMethod(inst) From bf328bf6129cfbfe49369dfa745f0efa851d8918 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 15:42:02 +0000 Subject: [PATCH 139/170] Refactor ticker management --- lib/agent/grpc/server.go | 27 ++++++++----------- lib/request-processor/grpc/client.go | 10 +++---- .../handle_request_metadata.go | 5 ++-- 3 files changed, 18 insertions(+), 24 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 820617b0c..2529dcea9 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -70,15 +70,20 @@ func (s *GrpcServer) GetRateLimitingStatus(ctx context.Context, req *protos.Rate return &protos.RateLimitingStatus{Block: false}, nil } + startTickersOnce(server, "rate limiting") + + log.Debugf(server.Logger, "Received rate limiting info: %s %s %s %s %s %s", req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()) + return getRateLimitingStatus(server, req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()), nil +} + +// startTickersOnce is a helper function to start tickers exactly once +func startTickersOnce(server *ServerData, source string) { server.StartTickersOnce.Do(func() { - log.Debugf(server.Logger, "Starting all tickers for server \"AIK_RUNTIME_***%s\" (via rate limiting)", utils.AnonymizeToken(req.GetToken())) + log.Debugf(server.Logger, "Starting all tickers for server \"AIK_RUNTIME_***%s\" (via %s)", utils.AnonymizeToken(server.AikidoConfig.Token), source) cloud.StartAllTickers(server) rate_limiting.StartRateLimitingTicker(server) attack_wave_detection.StartAttackWaveTicker(server) }) - - log.Debugf(server.Logger, "Received rate limiting info: %s %s %s %s %s %s", req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()) - return getRateLimitingStatus(server, req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()), nil } func (s *GrpcServer) OnRequestShutdown(ctx context.Context, req *protos.RequestMetadataShutdown) (*emptypb.Empty, error) { @@ -180,23 +185,13 @@ func (s *GrpcServer) OnMonitoredUserAgentMatch(ctx context.Context, req *protos. 
return &emptypb.Empty{}, nil } -func (s *GrpcServer) EnsureTickersStarted(ctx context.Context, req *protos.ServerIdentifier) (*emptypb.Empty, error) { +func (s *GrpcServer) StartTickers(ctx context.Context, req *protos.ServerIdentifier) (*emptypb.Empty, error) { server := globals.GetServer(ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()}) if server == nil { return &emptypb.Empty{}, nil } - // Start all tickers on first request (exactly once via sync.Once) - // This is called explicitly from the request processor on the first request - server.StartTickersOnce.Do(func() { - log.Debugf(server.Logger, "Starting all tickers for server \"AIK_RUNTIME_***%s\"", utils.AnonymizeToken(req.GetToken())) - cloud.StartAllTickers(server) - rate_limiting.StartRateLimitingTicker(server) - attack_wave_detection.StartAttackWaveTicker(server) - }) - - // Mark that this server has received traffic - atomic.StoreUint32(&server.GotTraffic, 1) + startTickersOnce(server, "explicit request") return &emptypb.Empty{}, nil } diff --git a/lib/request-processor/grpc/client.go b/lib/request-processor/grpc/client.go index 072fc136b..f58f078e0 100644 --- a/lib/request-processor/grpc/client.go +++ b/lib/request-processor/grpc/client.go @@ -112,8 +112,8 @@ func OnPackages(server *ServerData, packages map[string]string) { log.Debugf(nil, "Packages sent via socket!") } -/* Ensure tickers are started for the server (called on first request) */ -func EnsureTickersStarted(server *ServerData) { +/* Start tickers on the agent side (lightweight call for simple requests) */ +func StartTickers(server *ServerData) { if client == nil { return } @@ -121,13 +121,13 @@ func EnsureTickersStarted(server *ServerData) { ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) defer cancel() - _, err := client.EnsureTickersStarted(ctx, &protos.ServerIdentifier{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID}) + _, err := client.StartTickers(ctx, &protos.ServerIdentifier{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID}) if err != nil { - log.Warnf(nil, "Could not ensure tickers started: %v", err) + log.Warnf(nil, "Could not start tickers: %v", err) return } - log.Debugf(nil, "Tickers initialization requested via socket!") + log.Debugf(nil, "Tickers start requested via socket!") } /* Send request metadata (route & method) to Aikido Agent via gRPC */ diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index 212b2a47e..a7947b6a6 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -21,9 +21,7 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { return } - // Ensure tickers are started on first request from a non-bypassed IP - // This ensures heartbeats and other background operations start when we have actual traffic to monitor - grpc.EnsureTickersStarted(params.Server) + grpc.StartTickers(params.Server) log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) // Only detect web scanner activity for non-bypassed IPs @@ -31,6 +29,7 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { params.IsWebScanner = webscanner.IsWebScanner(params.Method, params.Route, params.QueryParsed) } params.ShouldDiscoverRoute = utils.ShouldDiscoverRoute(params.StatusCode, params.Route, params.Method) + if !params.RateLimited && 
!params.ShouldDiscoverRoute && !params.IsWebScanner { return } From 1a124977b51c6c0db4c436e01b1d106911a5a6cd Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 15:48:33 +0000 Subject: [PATCH 140/170] ++ --- lib/ipc.proto | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ipc.proto b/lib/ipc.proto index f8f98e8d6..78ff1c084 100644 --- a/lib/ipc.proto +++ b/lib/ipc.proto @@ -19,7 +19,7 @@ service Aikido { rpc OnMiddlewareInstalled(MiddlewareInstalledInfo) returns (google.protobuf.Empty); rpc OnMonitoredIpMatch(MonitoredIpMatch) returns (google.protobuf.Empty); rpc OnMonitoredUserAgentMatch(MonitoredUserAgentMatch) returns (google.protobuf.Empty); - rpc EnsureTickersStarted(ServerIdentifier) returns (google.protobuf.Empty); + rpc StartTickers(ServerIdentifier) returns (google.protobuf.Empty); } message Config { From 3ad616db7bebbf0c604640cf9d7f4d1bdc313367 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 16:11:41 +0000 Subject: [PATCH 141/170] ++ --- lib/request-processor/grpc/client.go | 2 +- lib/request-processor/handle_request_metadata.go | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/request-processor/grpc/client.go b/lib/request-processor/grpc/client.go index f58f078e0..a2990032d 100644 --- a/lib/request-processor/grpc/client.go +++ b/lib/request-processor/grpc/client.go @@ -114,7 +114,7 @@ func OnPackages(server *ServerData, packages map[string]string) { /* Start tickers on the agent side (lightweight call for simple requests) */ func StartTickers(server *ServerData) { - if client == nil { + if client == nil || server == nil { return } diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index a7947b6a6..f3d9245b6 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -21,7 +21,9 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { return } - grpc.StartTickers(params.Server) + if params.Server != nil { + grpc.StartTickers(params.Server) + } log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) // Only detect web scanner activity for non-bypassed IPs From 0493aa95ab480ccc6b1a755dfb874ac7b1aa00cb Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 16:23:13 +0000 Subject: [PATCH 142/170] -_- --- lib/request-processor/aikido_types/handle.go | 1 - lib/request-processor/handle_request_metadata.go | 5 ----- 2 files changed, 6 deletions(-) diff --git a/lib/request-processor/aikido_types/handle.go b/lib/request-processor/aikido_types/handle.go index 7b62ff3cb..b3b17d9dc 100644 --- a/lib/request-processor/aikido_types/handle.go +++ b/lib/request-processor/aikido_types/handle.go @@ -25,5 +25,4 @@ type RequestShutdownParams struct { IsWebScanner bool ShouldDiscoverRoute bool IsIpBypassed bool - Server *ServerData } diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index f3d9245b6..00936b59b 100644 --- a/lib/request-processor/handle_request_metadata.go +++ b/lib/request-processor/handle_request_metadata.go @@ -21,10 +21,6 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { return } - if params.Server != nil { - grpc.StartTickers(params.Server) - } - log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) // Only detect web scanner 
activity for non-bypassed IPs if !params.IsIpBypassed { @@ -63,7 +59,6 @@ func OnPostRequest(inst *instance.RequestProcessorInstance) string { QueryParsed: context.GetQueryParsed(inst), IsIpBypassed: context.IsIpBypassed(inst), APISpec: api_discovery.GetApiInfo(inst, inst.GetCurrentServer()), - Server: server, } context.Clear(inst) From 5bb47a237bbeb11660f7d2ba517dcae4728f3fc8 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 17:18:51 +0000 Subject: [PATCH 143/170] Start tickers in all the runtime handlers --- lib/agent/grpc/server.go | 31 +++++++++++++++++--------- lib/ipc.proto | 6 ----- lib/php-extension/Aikido.cpp | 1 + lib/php-extension/Environment.cpp | 1 + lib/php-extension/Handle.cpp | 26 ++++++++++++++++----- lib/php-extension/include/php_aikido.h | 1 + lib/request-processor/grpc/client.go | 18 --------------- 7 files changed, 43 insertions(+), 41 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 2529dcea9..b6933e449 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -59,6 +59,9 @@ func (s *GrpcServer) OnDomain(ctx context.Context, req *protos.Domain) (*emptypb if server == nil { return &emptypb.Empty{}, nil } + + startTickersOnce(server, "domain event") + log.Debugf(server.Logger, "Received domain: %s:%d", req.GetDomain(), req.GetPort()) storeDomain(server, req.GetDomain(), req.GetPort()) return &emptypb.Empty{}, nil @@ -123,6 +126,9 @@ func (s *GrpcServer) OnUser(ctx context.Context, req *protos.User) (*emptypb.Emp if server == nil { return &emptypb.Empty{}, nil } + + startTickersOnce(server, "user event") + log.Debugf(server.Logger, "Received user event: %s", req.GetId()) go onUserEvent(server, req.GetId(), req.GetUsername(), req.GetIp()) return &emptypb.Empty{}, nil @@ -133,6 +139,9 @@ func (s *GrpcServer) OnAttackDetected(ctx context.Context, req *protos.AttackDet if server == nil { return &emptypb.Empty{}, nil } + + startTickersOnce(server, "attack detection") + cloud.SendAttackDetectedEvent(server, req, "detected_attack") storeAttackStats(server, req) return &emptypb.Empty{}, nil @@ -143,6 +152,9 @@ func (s *GrpcServer) OnMonitoredSinkStats(ctx context.Context, req *protos.Monit if server == nil { return &emptypb.Empty{}, nil } + + startTickersOnce(server, "sink stats") + storeSinkStats(server, req) return &emptypb.Empty{}, nil } @@ -152,6 +164,8 @@ func (s *GrpcServer) OnMiddlewareInstalled(ctx context.Context, req *protos.Midd if server == nil { return &emptypb.Empty{}, nil } + + // Note: Don't start tickers here - this is an initialization event, not runtime traffic log.Debugf(server.Logger, "Received MiddlewareInstalled") atomic.StoreUint32(&server.MiddlewareInstalled, 1) return &emptypb.Empty{}, nil @@ -162,6 +176,9 @@ func (s *GrpcServer) OnMonitoredIpMatch(ctx context.Context, req *protos.Monitor if server == nil { return &emptypb.Empty{}, nil } + + startTickersOnce(server, "monitored IP match") + log.Debugf(server.Logger, "Received MonitoredIpMatch: %v", req.GetLists()) server.StatsData.StatsMutex.Lock() @@ -176,6 +193,9 @@ func (s *GrpcServer) OnMonitoredUserAgentMatch(ctx context.Context, req *protos. if server == nil { return &emptypb.Empty{}, nil } + + startTickersOnce(server, "monitored user agent match") + log.Debugf(server.Logger, "Received MonitoredUserAgentMatch: %v", req.GetLists()) server.StatsData.StatsMutex.Lock() @@ -185,17 +205,6 @@ func (s *GrpcServer) OnMonitoredUserAgentMatch(ctx context.Context, req *protos. 
return &emptypb.Empty{}, nil } -func (s *GrpcServer) StartTickers(ctx context.Context, req *protos.ServerIdentifier) (*emptypb.Empty, error) { - server := globals.GetServer(ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()}) - if server == nil { - return &emptypb.Empty{}, nil - } - - startTickersOnce(server, "explicit request") - - return &emptypb.Empty{}, nil -} - var grpcServer *grpc.Server func StartServer(lis net.Listener) { diff --git a/lib/ipc.proto b/lib/ipc.proto index 78ff1c084..0155712d0 100644 --- a/lib/ipc.proto +++ b/lib/ipc.proto @@ -19,7 +19,6 @@ service Aikido { rpc OnMiddlewareInstalled(MiddlewareInstalledInfo) returns (google.protobuf.Empty); rpc OnMonitoredIpMatch(MonitoredIpMatch) returns (google.protobuf.Empty); rpc OnMonitoredUserAgentMatch(MonitoredUserAgentMatch) returns (google.protobuf.Empty); - rpc StartTickers(ServerIdentifier) returns (google.protobuf.Empty); } message Config { @@ -232,9 +231,4 @@ message MonitoredUserAgentMatch { string token = 1; int32 server_pid = 2; repeated string lists = 3; -} - -message ServerIdentifier { - string token = 1; - int32 server_pid = 2; } \ No newline at end of file diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 756049537..05dc5de9c 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -107,6 +107,7 @@ PHP_GINIT_FUNCTION(aikido) { aikido_globals->collect_api_schema = false; aikido_globals->trust_proxy = false; aikido_globals->localhost_allowed_by_default = false; + aikido_globals->is_frankenphp = false; aikido_globals->report_stats_interval_to_agent = 0; aikido_globals->currentRequestStart = std::chrono::high_resolution_clock::time_point{}; aikido_globals->totalOverheadForCurrentRequest = 0; diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 5ad3911fa..2ec158e90 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -221,6 +221,7 @@ void LoadEnvironment() { AIKIDO_GLOBAL(trust_proxy) = GetEnvBool("AIKIDO_TRUST_PROXY", true); AIKIDO_GLOBAL(disk_logs) = GetEnvBool("AIKIDO_DISK_LOGS", false); AIKIDO_GLOBAL(sapi_name) = sapi_module.name; + AIKIDO_GLOBAL(is_frankenphp) = (AIKIDO_GLOBAL(sapi_name) == "frankenphp"); AIKIDO_GLOBAL(token) = GetEnvString("AIKIDO_TOKEN", ""); AIKIDO_GLOBAL(endpoint) = GetEnvString("AIKIDO_ENDPOINT", "https://guard.aikido.dev/"); AIKIDO_GLOBAL(config_endpoint) = GetEnvString("AIKIDO_REALTIME_ENDPOINT", "https://runtime.aikido.dev/"); diff --git a/lib/php-extension/Handle.cpp b/lib/php-extension/Handle.cpp index 358f2ce73..b930ba3e8 100644 --- a/lib/php-extension/Handle.cpp +++ b/lib/php-extension/Handle.cpp @@ -84,15 +84,29 @@ ZEND_NAMED_FUNCTION(aikido_generic_handler) { return; } - if (IsAikidoDisabledOrBypassed()) { + // For FrankenPHP, only check AIKIDO_GLOBAL(disable), not IP bypass + // FrankenPHP has a race condition: IP bypass check reads $_SERVER via zend_is_auto_global_str() + // which triggers go_register_variables() → thread.getRequestContext() without mutex lock + // This can access thread.handler while it's being set, causing x86_64 segfault + // For other SAPIs (php-fpm, apache, etc.), the full check is safe and provides better performance + if (!AIKIDO_GLOBAL(is_frankenphp)) { + // Non-FrankenPHP: Full bypass check (optimal performance) + if (IsAikidoDisabledOrBypassed()) { + if (original_handler) { + original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); + } + if (AIKIDO_GLOBAL(disable) == true) { + AIKIDO_LOG_INFO("Aikido generic handler finished 
earlier because AIKIDO_DISABLE is set to 1!\n"); + } else { + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); + } + return; + } + } else if (AIKIDO_GLOBAL(disable) == true) { if (original_handler) { original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); } - if (AIKIDO_GLOBAL(disable) == true) { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); - } else { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); - } + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); return; } diff --git a/lib/php-extension/include/php_aikido.h b/lib/php-extension/include/php_aikido.h index bb19eac26..19a274054 100644 --- a/lib/php-extension/include/php_aikido.h +++ b/lib/php-extension/include/php_aikido.h @@ -35,6 +35,7 @@ bool collect_api_schema; bool trust_proxy; bool localhost_allowed_by_default; bool uses_symfony_http_foundation; // If true, method override is supported using X-HTTP-METHOD-OVERRIDE or _method query param +bool is_frankenphp; unsigned int report_stats_interval_to_agent; // Report once every X requests the collected stats to Agent std::chrono::high_resolution_clock::time_point currentRequestStart; uint64_t totalOverheadForCurrentRequest; diff --git a/lib/request-processor/grpc/client.go b/lib/request-processor/grpc/client.go index a2990032d..c5a833c86 100644 --- a/lib/request-processor/grpc/client.go +++ b/lib/request-processor/grpc/client.go @@ -112,24 +112,6 @@ func OnPackages(server *ServerData, packages map[string]string) { log.Debugf(nil, "Packages sent via socket!") } -/* Start tickers on the agent side (lightweight call for simple requests) */ -func StartTickers(server *ServerData) { - if client == nil || server == nil { - return - } - - ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) - defer cancel() - - _, err := client.StartTickers(ctx, &protos.ServerIdentifier{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID}) - if err != nil { - log.Warnf(nil, "Could not start tickers: %v", err) - return - } - - log.Debugf(nil, "Tickers start requested via socket!") -} - /* Send request metadata (route & method) to Aikido Agent via gRPC */ func GetRateLimitingStatus(inst *instance.RequestProcessorInstance, server *ServerData, method string, route string, routeParsed string, user string, ip string, rateLimitGroup string, timeout time.Duration) *protos.RateLimitingStatus { if client == nil || server == nil { From 4a376362a50dbfc08ddbbfa7c01303e426a61457 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 17:29:45 +0000 Subject: [PATCH 144/170] +++ --- lib/agent/grpc/server.go | 2 ++ lib/php-extension/Aikido.cpp | 2 +- lib/php-extension/Environment.cpp | 1 - 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index b6933e449..61e74d073 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -95,6 +95,8 @@ func (s *GrpcServer) OnRequestShutdown(ctx context.Context, req *protos.RequestM return &emptypb.Empty{}, nil } + startTickersOnce(server, "request shutdown") + log.Debugf(server.Logger, "Received request metadata: %s %s %d %s %s %v", req.GetMethod(), req.GetRouteParsed(), req.GetStatusCode(), req.GetUser(), req.GetIp(), req.GetApiSpec()) if req.GetShouldDiscoverRoute() || req.GetRateLimited() { go storeTotalStats(server, req.GetRateLimited()) diff --git a/lib/php-extension/Aikido.cpp 
b/lib/php-extension/Aikido.cpp index 05dc5de9c..b7c9943e7 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -107,7 +107,7 @@ PHP_GINIT_FUNCTION(aikido) { aikido_globals->collect_api_schema = false; aikido_globals->trust_proxy = false; aikido_globals->localhost_allowed_by_default = false; - aikido_globals->is_frankenphp = false; + aikido_globals->is_frankenphp = (strcmp(sapi_module.name, "frankenphp") == 0); aikido_globals->report_stats_interval_to_agent = 0; aikido_globals->currentRequestStart = std::chrono::high_resolution_clock::time_point{}; aikido_globals->totalOverheadForCurrentRequest = 0; diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 2ec158e90..5ad3911fa 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -221,7 +221,6 @@ void LoadEnvironment() { AIKIDO_GLOBAL(trust_proxy) = GetEnvBool("AIKIDO_TRUST_PROXY", true); AIKIDO_GLOBAL(disk_logs) = GetEnvBool("AIKIDO_DISK_LOGS", false); AIKIDO_GLOBAL(sapi_name) = sapi_module.name; - AIKIDO_GLOBAL(is_frankenphp) = (AIKIDO_GLOBAL(sapi_name) == "frankenphp"); AIKIDO_GLOBAL(token) = GetEnvString("AIKIDO_TOKEN", ""); AIKIDO_GLOBAL(endpoint) = GetEnvString("AIKIDO_ENDPOINT", "https://guard.aikido.dev/"); AIKIDO_GLOBAL(config_endpoint) = GetEnvString("AIKIDO_REALTIME_ENDPOINT", "https://runtime.aikido.dev/"); From abccd551c3d15805688f158598914673bc8a2dc8 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 17:49:28 +0000 Subject: [PATCH 145/170] fix --- lib/php-extension/Handle.cpp | 26 +++++--------------- lib/php-extension/HandleBypassedIp.cpp | 9 +++++++ lib/php-extension/include/RequestProcessor.h | 1 + 3 files changed, 16 insertions(+), 20 deletions(-) diff --git a/lib/php-extension/Handle.cpp b/lib/php-extension/Handle.cpp index b930ba3e8..358f2ce73 100644 --- a/lib/php-extension/Handle.cpp +++ b/lib/php-extension/Handle.cpp @@ -84,29 +84,15 @@ ZEND_NAMED_FUNCTION(aikido_generic_handler) { return; } - // For FrankenPHP, only check AIKIDO_GLOBAL(disable), not IP bypass - // FrankenPHP has a race condition: IP bypass check reads $_SERVER via zend_is_auto_global_str() - // which triggers go_register_variables() → thread.getRequestContext() without mutex lock - // This can access thread.handler while it's being set, causing x86_64 segfault - // For other SAPIs (php-fpm, apache, etc.), the full check is safe and provides better performance - if (!AIKIDO_GLOBAL(is_frankenphp)) { - // Non-FrankenPHP: Full bypass check (optimal performance) - if (IsAikidoDisabledOrBypassed()) { - if (original_handler) { - original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); - } - if (AIKIDO_GLOBAL(disable) == true) { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); - } else { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); - } - return; - } - } else if (AIKIDO_GLOBAL(disable) == true) { + if (IsAikidoDisabledOrBypassed()) { if (original_handler) { original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); } - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); + if (AIKIDO_GLOBAL(disable) == true) { + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); + } else { + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); + } return; } diff --git a/lib/php-extension/HandleBypassedIp.cpp b/lib/php-extension/HandleBypassedIp.cpp index 
b4e26c155..d4a23f519 100644 --- a/lib/php-extension/HandleBypassedIp.cpp +++ b/lib/php-extension/HandleBypassedIp.cpp @@ -25,6 +25,15 @@ bool IsAikidoDisabledOrBypassed() { return true; } + // For FrankenPHP, only check IP bypass after request is initialized + // FrankenPHP has a race: IP bypass check reads $_SERVER via zend_is_auto_global_str() + // which triggers go_register_variables() → thread.getRequestContext() without mutex lock + // This can access thread.handler while it's being set during early request setup + // After RINIT completes, requestInitialized=true and $_SERVER access is safe + if (AIKIDO_GLOBAL(is_frankenphp) && !AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { + return false; + } + if (!AIKIDO_GLOBAL(checkedIpBypass)) { AIKIDO_GLOBAL(checkedIpBypass) = true; InitIpBypassCheck(); diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h index 2fe0adfa6..c75f8eb8b 100644 --- a/lib/php-extension/include/RequestProcessor.h +++ b/lib/php-extension/include/RequestProcessor.h @@ -50,6 +50,7 @@ class RequestProcessor { void LoadConfigWithTokenFromPHPSetToken(const std::string& tokenFromMiddleware); void RequestShutdown(); void Uninit(); + bool IsRequestInitialized() const { return requestInitialized; } ~RequestProcessor(); }; From fb64927a562045ece47266514abc92a829cd2521 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Tue, 30 Dec 2025 23:29:44 +0000 Subject: [PATCH 146/170] test --- lib/php-extension/Environment.cpp | 17 +++++++++++++++++ lib/php-extension/Packages.cpp | 6 ++++++ 2 files changed, 23 insertions(+) diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index 5ad3911fa..f3849df8f 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -39,6 +39,13 @@ bool LoadLaravelEnvFile() { return true; } + // For FrankenPHP, skip if request is not initialized yet + // Accessing $_SERVER before RINIT triggers a race condition + if (AIKIDO_GLOBAL(is_frankenphp) && !AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { + AIKIDO_LOG_DEBUG("Skipping Laravel .env load - request not initialized yet in FrankenPHP\n"); + return false; + } + std::string docRoot = AIKIDO_GLOBAL(server).GetVar("DOCUMENT_ROOT"); AIKIDO_LOG_DEBUG("Trying to load .env file, starting with DOCUMENT_ROOT: %s\n", docRoot.c_str()); if (docRoot.empty()) { @@ -100,12 +107,22 @@ bool LoadLaravelEnvFile() { /* FrankenPHP's Caddyfile env directive only populates $_SERVER, not the process environment. This function reads environment variables from $_SERVER for FrankenPHP compatibility. + + IMPORTANT: Can only be called after RINIT (request initialization) in FrankenPHP! + Calling this before RINIT triggers go_register_variables() race condition causing segfault on x86_64. 
*/ std::string GetFrankenEnvVariable(const std::string& env_key) { if (AIKIDO_GLOBAL(sapi_name) != "frankenphp") { return ""; } + // For FrankenPHP, skip if request is not initialized yet + // Accessing $_SERVER before RINIT triggers a race condition in go_register_variables() + if (!AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { + AIKIDO_LOG_DEBUG("franken_env[%s] = (skipped - request not initialized yet)\n", env_key.c_str()); + return ""; + } + // Force $_SERVER autoglobal to be initialized (it's lazily loaded in PHP) // This is CRITICAL in ZTS mode to ensure each thread gets request-specific $_SERVER values zend_is_auto_global_str(ZEND_STRL("_SERVER")); diff --git a/lib/php-extension/Packages.cpp b/lib/php-extension/Packages.cpp index 19662ff72..4135faa11 100644 --- a/lib/php-extension/Packages.cpp +++ b/lib/php-extension/Packages.cpp @@ -60,6 +60,12 @@ std::string GetComposerPackageVersion(const std::string& version) { unordered_map GetComposerPackages() { unordered_map packages; + // For FrankenPHP, skip if request is not initialized yet + // Accessing $_SERVER before RINIT triggers a race condition + if (AIKIDO_GLOBAL(is_frankenphp) && !AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { + return packages; + } + std::string docRoot = AIKIDO_GLOBAL(server).GetVar("DOCUMENT_ROOT"); if (docRoot.empty()) { return packages; From 860bb8da182bfe874de0235a516af30b446d21da Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 07:48:09 +0000 Subject: [PATCH 147/170] Revert "fix" This reverts commit abccd551c3d15805688f158598914673bc8a2dc8. --- lib/php-extension/Handle.cpp | 26 +++++++++++++++----- lib/php-extension/HandleBypassedIp.cpp | 9 ------- lib/php-extension/include/RequestProcessor.h | 1 - 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/lib/php-extension/Handle.cpp b/lib/php-extension/Handle.cpp index 358f2ce73..b930ba3e8 100644 --- a/lib/php-extension/Handle.cpp +++ b/lib/php-extension/Handle.cpp @@ -84,15 +84,29 @@ ZEND_NAMED_FUNCTION(aikido_generic_handler) { return; } - if (IsAikidoDisabledOrBypassed()) { + // For FrankenPHP, only check AIKIDO_GLOBAL(disable), not IP bypass + // FrankenPHP has a race condition: IP bypass check reads $_SERVER via zend_is_auto_global_str() + // which triggers go_register_variables() → thread.getRequestContext() without mutex lock + // This can access thread.handler while it's being set, causing x86_64 segfault + // For other SAPIs (php-fpm, apache, etc.), the full check is safe and provides better performance + if (!AIKIDO_GLOBAL(is_frankenphp)) { + // Non-FrankenPHP: Full bypass check (optimal performance) + if (IsAikidoDisabledOrBypassed()) { + if (original_handler) { + original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); + } + if (AIKIDO_GLOBAL(disable) == true) { + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); + } else { + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); + } + return; + } + } else if (AIKIDO_GLOBAL(disable) == true) { if (original_handler) { original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); } - if (AIKIDO_GLOBAL(disable) == true) { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); - } else { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); - } + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); return; } diff --git a/lib/php-extension/HandleBypassedIp.cpp 
b/lib/php-extension/HandleBypassedIp.cpp index d4a23f519..b4e26c155 100644 --- a/lib/php-extension/HandleBypassedIp.cpp +++ b/lib/php-extension/HandleBypassedIp.cpp @@ -25,15 +25,6 @@ bool IsAikidoDisabledOrBypassed() { return true; } - // For FrankenPHP, only check IP bypass after request is initialized - // FrankenPHP has a race: IP bypass check reads $_SERVER via zend_is_auto_global_str() - // which triggers go_register_variables() → thread.getRequestContext() without mutex lock - // This can access thread.handler while it's being set during early request setup - // After RINIT completes, requestInitialized=true and $_SERVER access is safe - if (AIKIDO_GLOBAL(is_frankenphp) && !AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { - return false; - } - if (!AIKIDO_GLOBAL(checkedIpBypass)) { AIKIDO_GLOBAL(checkedIpBypass) = true; InitIpBypassCheck(); diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h index c75f8eb8b..2fe0adfa6 100644 --- a/lib/php-extension/include/RequestProcessor.h +++ b/lib/php-extension/include/RequestProcessor.h @@ -50,7 +50,6 @@ class RequestProcessor { void LoadConfigWithTokenFromPHPSetToken(const std::string& tokenFromMiddleware); void RequestShutdown(); void Uninit(); - bool IsRequestInitialized() const { return requestInitialized; } ~RequestProcessor(); }; From 1ecc87517b8795bcac30a8f51171eb7a7b326d08 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 07:48:11 +0000 Subject: [PATCH 148/170] Revert "test" This reverts commit fb64927a562045ece47266514abc92a829cd2521. --- lib/php-extension/Environment.cpp | 17 ----------------- lib/php-extension/Packages.cpp | 6 ------ 2 files changed, 23 deletions(-) diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp index f3849df8f..5ad3911fa 100644 --- a/lib/php-extension/Environment.cpp +++ b/lib/php-extension/Environment.cpp @@ -39,13 +39,6 @@ bool LoadLaravelEnvFile() { return true; } - // For FrankenPHP, skip if request is not initialized yet - // Accessing $_SERVER before RINIT triggers a race condition - if (AIKIDO_GLOBAL(is_frankenphp) && !AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { - AIKIDO_LOG_DEBUG("Skipping Laravel .env load - request not initialized yet in FrankenPHP\n"); - return false; - } - std::string docRoot = AIKIDO_GLOBAL(server).GetVar("DOCUMENT_ROOT"); AIKIDO_LOG_DEBUG("Trying to load .env file, starting with DOCUMENT_ROOT: %s\n", docRoot.c_str()); if (docRoot.empty()) { @@ -107,22 +100,12 @@ bool LoadLaravelEnvFile() { /* FrankenPHP's Caddyfile env directive only populates $_SERVER, not the process environment. This function reads environment variables from $_SERVER for FrankenPHP compatibility. - - IMPORTANT: Can only be called after RINIT (request initialization) in FrankenPHP! - Calling this before RINIT triggers go_register_variables() race condition causing segfault on x86_64. 
*/ std::string GetFrankenEnvVariable(const std::string& env_key) { if (AIKIDO_GLOBAL(sapi_name) != "frankenphp") { return ""; } - // For FrankenPHP, skip if request is not initialized yet - // Accessing $_SERVER before RINIT triggers a race condition in go_register_variables() - if (!AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { - AIKIDO_LOG_DEBUG("franken_env[%s] = (skipped - request not initialized yet)\n", env_key.c_str()); - return ""; - } - // Force $_SERVER autoglobal to be initialized (it's lazily loaded in PHP) // This is CRITICAL in ZTS mode to ensure each thread gets request-specific $_SERVER values zend_is_auto_global_str(ZEND_STRL("_SERVER")); diff --git a/lib/php-extension/Packages.cpp b/lib/php-extension/Packages.cpp index 4135faa11..19662ff72 100644 --- a/lib/php-extension/Packages.cpp +++ b/lib/php-extension/Packages.cpp @@ -60,12 +60,6 @@ std::string GetComposerPackageVersion(const std::string& version) { unordered_map GetComposerPackages() { unordered_map packages; - // For FrankenPHP, skip if request is not initialized yet - // Accessing $_SERVER before RINIT triggers a race condition - if (AIKIDO_GLOBAL(is_frankenphp) && !AIKIDO_GLOBAL(requestProcessor).IsRequestInitialized()) { - return packages; - } - std::string docRoot = AIKIDO_GLOBAL(server).GetVar("DOCUMENT_ROOT"); if (docRoot.empty()) { return packages; From 69412260a312d5eca4d4a4eb3c64a3ce4b2886c6 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 07:53:34 +0000 Subject: [PATCH 149/170] No longer need is_frankenphp --- lib/php-extension/Aikido.cpp | 1 - lib/php-extension/Handle.cpp | 26 ++++++-------------------- lib/php-extension/include/php_aikido.h | 1 - 3 files changed, 6 insertions(+), 22 deletions(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index b7c9943e7..756049537 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -107,7 +107,6 @@ PHP_GINIT_FUNCTION(aikido) { aikido_globals->collect_api_schema = false; aikido_globals->trust_proxy = false; aikido_globals->localhost_allowed_by_default = false; - aikido_globals->is_frankenphp = (strcmp(sapi_module.name, "frankenphp") == 0); aikido_globals->report_stats_interval_to_agent = 0; aikido_globals->currentRequestStart = std::chrono::high_resolution_clock::time_point{}; aikido_globals->totalOverheadForCurrentRequest = 0; diff --git a/lib/php-extension/Handle.cpp b/lib/php-extension/Handle.cpp index b930ba3e8..358f2ce73 100644 --- a/lib/php-extension/Handle.cpp +++ b/lib/php-extension/Handle.cpp @@ -84,29 +84,15 @@ ZEND_NAMED_FUNCTION(aikido_generic_handler) { return; } - // For FrankenPHP, only check AIKIDO_GLOBAL(disable), not IP bypass - // FrankenPHP has a race condition: IP bypass check reads $_SERVER via zend_is_auto_global_str() - // which triggers go_register_variables() → thread.getRequestContext() without mutex lock - // This can access thread.handler while it's being set, causing x86_64 segfault - // For other SAPIs (php-fpm, apache, etc.), the full check is safe and provides better performance - if (!AIKIDO_GLOBAL(is_frankenphp)) { - // Non-FrankenPHP: Full bypass check (optimal performance) - if (IsAikidoDisabledOrBypassed()) { - if (original_handler) { - original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); - } - if (AIKIDO_GLOBAL(disable) == true) { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); - } else { - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); - } - return; - } - 
} else if (AIKIDO_GLOBAL(disable) == true) { + if (IsAikidoDisabledOrBypassed()) { if (original_handler) { original_handler(INTERNAL_FUNCTION_PARAM_PASSTHRU); } - AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); + if (AIKIDO_GLOBAL(disable) == true) { + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because AIKIDO_DISABLE is set to 1!\n"); + } else { + AIKIDO_LOG_INFO("Aikido generic handler finished earlier because IP is bypassed!\n"); + } return; } diff --git a/lib/php-extension/include/php_aikido.h b/lib/php-extension/include/php_aikido.h index 19a274054..bb19eac26 100644 --- a/lib/php-extension/include/php_aikido.h +++ b/lib/php-extension/include/php_aikido.h @@ -35,7 +35,6 @@ bool collect_api_schema; bool trust_proxy; bool localhost_allowed_by_default; bool uses_symfony_http_foundation; // If true, method override is supported using X-HTTP-METHOD-OVERRIDE or _method query param -bool is_frankenphp; unsigned int report_stats_interval_to_agent; // Report once every X requests the collected stats to Agent std::chrono::high_resolution_clock::time_point currentRequestStart; uint64_t totalOverheadForCurrentRequest; From 2dbd2c4fb375dbf73276422498faff5ecad6c561 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 08:55:34 +0000 Subject: [PATCH 150/170] test --- lib/php-extension/Aikido.cpp | 6 +++++- lib/php-extension/PhpLifecycle.cpp | 5 +++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 756049537..341a27be5 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -4,7 +4,11 @@ ZEND_DECLARE_MODULE_GLOBALS(aikido) PHP_MINIT_FUNCTION(aikido) { - LoadEnvironment(); + // For FrankenPHP: Skip LoadEnvironment during MINIT (will be called in RINIT) + // For other SAPIs: Load environment during MINIT as normal + if (sapi_module.name != std::string("frankenphp")) { + LoadEnvironment(); + } AIKIDO_GLOBAL(logger).Init(); AIKIDO_LOG_INFO("MINIT started!\n"); diff --git a/lib/php-extension/PhpLifecycle.cpp b/lib/php-extension/PhpLifecycle.cpp index d579d5977..7b2a3d744 100644 --- a/lib/php-extension/PhpLifecycle.cpp +++ b/lib/php-extension/PhpLifecycle.cpp @@ -11,6 +11,11 @@ void PhpLifecycle::ModuleInit() { } void PhpLifecycle::RequestInit() { + // For FrankenPHP: Load environment during RINIT (skipped in MINIT) + if (AIKIDO_GLOBAL(sapi_name) == "frankenphp") { + LoadEnvironment(); + } + AIKIDO_GLOBAL(action).Reset(); AIKIDO_GLOBAL(requestCache).Reset(); AIKIDO_GLOBAL(requestProcessor).RequestInit(); From 169b53a0aab54cb72019dee6eb5a2e6f007acec9 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 09:13:42 +0000 Subject: [PATCH 151/170] Don't LoadEnvironment for FrankenPHP on MINIT --- lib/php-extension/PhpLifecycle.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/lib/php-extension/PhpLifecycle.cpp b/lib/php-extension/PhpLifecycle.cpp index 7b2a3d744..5d0013117 100644 --- a/lib/php-extension/PhpLifecycle.cpp +++ b/lib/php-extension/PhpLifecycle.cpp @@ -11,13 +11,9 @@ void PhpLifecycle::ModuleInit() { } void PhpLifecycle::RequestInit() { - // For FrankenPHP: Load environment during RINIT (skipped in MINIT) - if (AIKIDO_GLOBAL(sapi_name) == "frankenphp") { - LoadEnvironment(); - } - AIKIDO_GLOBAL(action).Reset(); AIKIDO_GLOBAL(requestCache).Reset(); + AIKIDO_GLOBAL(requestProcessor).RequestInit(); AIKIDO_GLOBAL(checkedAutoBlock) = false; AIKIDO_GLOBAL(checkedShouldBlockRequest) = false; 
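The surrounding patches (150 through 152) settle on deferring environment loading for FrankenPHP: the Caddyfile env directive only populates $_SERVER per request, while classic SAPIs expose a stable process environment at module init. Below is a standalone C++ sketch of that MINIT/RINIT split, assuming illustrative stand-ins for sapi_module.name, LoadEnvironment(), and the lifecycle hooks; it is not the extension's actual code.

    #include <iostream>
    #include <string>

    // Stand-ins only; the real extension reads sapi_module.name and loads
    // configuration through LoadEnvironment()/GetFrankenEnvVariable().
    static std::string g_sapi_name;
    static bool g_environment_loaded = false;

    static void LoadEnvironment() {
        g_environment_loaded = true;
        std::cout << "environment loaded\n";
    }

    // MINIT: classic SAPIs (php-fpm, apache, cli) have a stable process
    // environment, so everything can be loaded once at module startup.
    void ModuleInit(const std::string &sapi_name) {
        g_sapi_name = sapi_name;
        if (g_sapi_name != "frankenphp") {
            LoadEnvironment();
        }
    }

    // RINIT: under FrankenPHP the Caddyfile `env` values only show up in the
    // per-request $_SERVER, so loading is deferred until a request is active.
    void RequestInit() {
        if (g_sapi_name == "frankenphp" && !g_environment_loaded) {
            LoadEnvironment();
        }
    }

    int main() {
        ModuleInit("frankenphp");  // nothing loaded yet for this SAPI
        RequestInit();             // environment loaded here instead
        return 0;
    }

Keeping the sapi_name assignment in MINIT, as the next patch does, lets later checks branch on the SAPI without touching $_SERVER before request init.
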
From 2040d1de348b877d1c007f935ada4e8222e5d443 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 09:45:01 +0000 Subject: [PATCH 152/170] Update MINIT function for FrankenPHP to set sapi_name and defer environment loading to RINIT --- lib/php-extension/Aikido.cpp | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index 341a27be5..c3b6c7ac3 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -4,9 +4,12 @@ ZEND_DECLARE_MODULE_GLOBALS(aikido) PHP_MINIT_FUNCTION(aikido) { - // For FrankenPHP: Skip LoadEnvironment during MINIT (will be called in RINIT) - // For other SAPIs: Load environment during MINIT as normal - if (sapi_module.name != std::string("frankenphp")) { + // For FrankenPHP: Set sapi_name but skip rest of LoadEnvironment during MINIT + // Full environment will be loaded in RINIT when Caddyfile env vars are available + if (sapi_module.name == std::string("frankenphp")) { + AIKIDO_GLOBAL(sapi_name) = sapi_module.name; + } else { + // For other SAPIs: Load environment during MINIT as normal LoadEnvironment(); } AIKIDO_GLOBAL(logger).Init(); From 956b54bdf0325f0b34055995c0e4313c644d2a40 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 10:30:02 +0000 Subject: [PATCH 153/170] Atomic server creation in request processor --- lib/agent/grpc/server.go | 16 ++++++++++++++-- lib/agent/server_utils/server.go | 12 +++++++++--- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 61e74d073..5cfda38f2 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -35,13 +35,25 @@ func (s *GrpcServer) OnConfig(ctx context.Context, req *protos.Config) (*emptypb return &emptypb.Empty{}, nil } - server := globals.GetServer(ServerKey{Token: token, ServerPID: req.GetServerPid()}) + serverKey := ServerKey{Token: token, ServerPID: req.GetServerPid()} + + globals.ServersMutex.Lock() + server := globals.Servers[serverKey] if server != nil { + globals.ServersMutex.Unlock() log.Debugf(server.Logger, "Server \"AIK_RUNTIME_***%s\" already exists, skipping config update (request processor PID: %d, server PID: %d)", utils.AnonymizeToken(token), req.GetRequestProcessorPid(), req.GetServerPid()) return &emptypb.Empty{}, nil } - server_utils.Register(ServerKey{Token: token, ServerPID: req.GetServerPid()}, req.GetRequestProcessorPid(), req) + // Server doesn't exist, create it while still holding the lock + log.Infof(log.MainLogger, "Client (request processor PID: %d) connected. Registering server \"AIK_RUNTIME_***%s\" (server PID: %d)...", req.GetRequestProcessorPid(), utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) + server = NewServerData() + globals.Servers[serverKey] = server + globals.ServersMutex.Unlock() + + // Now configure the server (outside the lock to avoid holding it too long) + server_utils.ConfigureServer(server, req) + return &emptypb.Empty{}, nil } diff --git a/lib/agent/server_utils/server.go b/lib/agent/server_utils/server.go index c61fbeabd..f09738095 100644 --- a/lib/agent/server_utils/server.go +++ b/lib/agent/server_utils/server.go @@ -28,10 +28,9 @@ func storeConfig(server *ServerData, req *protos.Config) { server.AikidoConfig.CollectApiSchema = req.GetCollectApiSchema() } -func Register(serverKey ServerKey, requestProcessorPID int32, req *protos.Config) { - log.Infof(log.MainLogger, "Client (request processor PID: %d) connected. 
Registering server \"AIK_RUNTIME_***%s\" (server PID: %d)...", requestProcessorPID, utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) +func ConfigureServer(server *ServerData, req *protos.Config) { + serverKey := ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()} - server := globals.CreateServer(serverKey) storeConfig(server, req) server.Logger = log.CreateLogger(utils.AnonymizeToken(serverKey.Token), server.AikidoConfig.LogLevel, server.AikidoConfig.DiskLogs) @@ -47,6 +46,13 @@ func Register(serverKey ServerKey, requestProcessorPID int32, req *protos.Config } } +func Register(serverKey ServerKey, requestProcessorPID int32, req *protos.Config) { + log.Infof(log.MainLogger, "Client (request processor PID: %d) connected. Registering server \"AIK_RUNTIME_***%s\" (server PID: %d)...", requestProcessorPID, utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) + + server := globals.CreateServer(serverKey) + ConfigureServer(server, req) +} + func Unregister(serverKey ServerKey) { log.Infof(log.MainLogger, "Unregistering server \"AIK_RUNTIME_***%s\" (server PID: %d)...", utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) server := globals.GetServer(serverKey) From d0ff6b37dd2285c7a38f2ede612869434e2466fc Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 11:00:53 +0000 Subject: [PATCH 154/170] Refactor server registration to avoid holding mutex during configuration. The server is now added to the global map after configuration, improving concurrency handling. --- lib/agent/grpc/server.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 5cfda38f2..fcee4f122 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -45,15 +45,15 @@ func (s *GrpcServer) OnConfig(ctx context.Context, req *protos.Config) (*emptypb return &emptypb.Empty{}, nil } - // Server doesn't exist, create it while still holding the lock log.Infof(log.MainLogger, "Client (request processor PID: %d) connected. 
Registering server \"AIK_RUNTIME_***%s\" (server PID: %d)...", req.GetRequestProcessorPid(), utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) server = NewServerData() - globals.Servers[serverKey] = server - globals.ServersMutex.Unlock() - // Now configure the server (outside the lock to avoid holding it too long) server_utils.ConfigureServer(server, req) + // Now add the fully configured server to the map + globals.Servers[serverKey] = server + globals.ServersMutex.Unlock() + return &emptypb.Empty{}, nil } From 4014d1fcdc8525bb0d455edcf901450255c3d8d3 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Wed, 31 Dec 2025 11:12:09 +0000 Subject: [PATCH 155/170] Refactor server configuration process by separating logger initialization and completion steps --- lib/agent/grpc/server.go | 9 +++++---- lib/agent/server_utils/server.go | 16 +++++++++++----- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index fcee4f122..9568fcf93 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -38,8 +38,8 @@ func (s *GrpcServer) OnConfig(ctx context.Context, req *protos.Config) (*emptypb serverKey := ServerKey{Token: token, ServerPID: req.GetServerPid()} globals.ServersMutex.Lock() - server := globals.Servers[serverKey] - if server != nil { + server, exists := globals.Servers[serverKey] + if exists { globals.ServersMutex.Unlock() log.Debugf(server.Logger, "Server \"AIK_RUNTIME_***%s\" already exists, skipping config update (request processor PID: %d, server PID: %d)", utils.AnonymizeToken(token), req.GetRequestProcessorPid(), req.GetServerPid()) return &emptypb.Empty{}, nil @@ -48,12 +48,13 @@ func (s *GrpcServer) OnConfig(ctx context.Context, req *protos.Config) (*emptypb log.Infof(log.MainLogger, "Client (request processor PID: %d) connected. 
Registering server \"AIK_RUNTIME_***%s\" (server PID: %d)...", req.GetRequestProcessorPid(), utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) server = NewServerData() - server_utils.ConfigureServer(server, req) + server_utils.InitializeServerLogger(server, req) - // Now add the fully configured server to the map globals.Servers[serverKey] = server globals.ServersMutex.Unlock() + server_utils.CompleteServerConfiguration(server, serverKey, req) + return &emptypb.Empty{}, nil } diff --git a/lib/agent/server_utils/server.go b/lib/agent/server_utils/server.go index f09738095..90dec1c17 100644 --- a/lib/agent/server_utils/server.go +++ b/lib/agent/server_utils/server.go @@ -28,16 +28,16 @@ func storeConfig(server *ServerData, req *protos.Config) { server.AikidoConfig.CollectApiSchema = req.GetCollectApiSchema() } -func ConfigureServer(server *ServerData, req *protos.Config) { - serverKey := ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()} - +func InitializeServerLogger(server *ServerData, req *protos.Config) { storeConfig(server, req) + serverKey := ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()} server.Logger = log.CreateLogger(utils.AnonymizeToken(serverKey.Token), server.AikidoConfig.LogLevel, server.AikidoConfig.DiskLogs) + atomic.StoreInt64(&server.LastConnectionTime, utils.GetTime()) +} +func CompleteServerConfiguration(server *ServerData, serverKey ServerKey, req *protos.Config) { log.InfofMainAndServer(server.Logger, "Server \"AIK_RUNTIME_***%s\" (server PID: %d) registered successfully!", utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) - atomic.StoreInt64(&server.LastConnectionTime, utils.GetTime()) - cloud.Init(server) if globals.IsPastDeletedServer(serverKey) { log.InfofMainAndServer(server.Logger, "Server \"AIK_RUNTIME_***%s\" (server PID: %d) was registered before for this server PID, but deleted due to inactivity! Skipping start event as it was sent before...", utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) @@ -46,6 +46,12 @@ func ConfigureServer(server *ServerData, req *protos.Config) { } } +func ConfigureServer(server *ServerData, req *protos.Config) { + serverKey := ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()} + InitializeServerLogger(server, req) + CompleteServerConfiguration(server, serverKey, req) +} + func Register(serverKey ServerKey, requestProcessorPID int32, req *protos.Config) { log.Infof(log.MainLogger, "Client (request processor PID: %d) connected. Registering server \"AIK_RUNTIME_***%s\" (server PID: %d)...", requestProcessorPID, utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) From 288700b89b6df8595d8e0841c211d9fb95d4299d Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 11:07:53 +0000 Subject: [PATCH 156/170] Remove global maps and include specific members into instance(that is passed everywhere). Update gRPC function signatures to accept instance pointers directly. 
--- lib/php-extension/PhpWrappers.cpp | 1 + lib/request-processor/aikido_types/handle.go | 22 ----- lib/request-processor/attack/attack.go | 7 +- .../context/context_for_unit_tests.go | 12 +-- .../context/event_context.go | 14 +-- .../context/request_context.go | 21 +++-- lib/request-processor/globals/globals.go | 37 -------- lib/request-processor/grpc/client.go | 91 ++++++++++++++----- .../handle_blocking_request.go | 13 +-- .../handle_request_metadata.go | 12 +-- lib/request-processor/handle_urls.go | 16 +--- lib/request-processor/handle_user_event.go | 7 +- lib/request-processor/instance/wrapper.go | 51 +++++++++++ lib/request-processor/log/log.go | 61 ------------- lib/request-processor/main.go | 3 +- 15 files changed, 166 insertions(+), 202 deletions(-) diff --git a/lib/php-extension/PhpWrappers.cpp b/lib/php-extension/PhpWrappers.cpp index 927289e40..6572a598c 100644 --- a/lib/php-extension/PhpWrappers.cpp +++ b/lib/php-extension/PhpWrappers.cpp @@ -17,6 +17,7 @@ bool CallPhpEcho(std::string message) { bool CallPhpFunction(std::string function_name, unsigned int params_number, zval* params, zval* return_value, zval* object) { if (!object && !zend_hash_str_exists(CG(function_table), function_name.c_str(), function_name.size())) { + AIKIDO_LOG_INFO("Function name '%s' does not exist!\n", function_name.c_str()); return false; } diff --git a/lib/request-processor/aikido_types/handle.go b/lib/request-processor/aikido_types/handle.go index b3b17d9dc..e14f23eb3 100644 --- a/lib/request-processor/aikido_types/handle.go +++ b/lib/request-processor/aikido_types/handle.go @@ -1,28 +1,6 @@ package aikido_types -import "main/ipc/protos" - type Method struct { ClassName string MethodName string } - -type RequestShutdownParams struct { - ThreadID uint64 - Token string - Method string - Route string - RouteParsed string - StatusCode int - User string - UserAgent string - IP string - Url string - RateLimitGroup string - APISpec *protos.APISpec - RateLimited bool - QueryParsed map[string]interface{} - IsWebScanner bool - ShouldDiscoverRoute bool - IsIpBypassed bool -} diff --git a/lib/request-processor/attack/attack.go b/lib/request-processor/attack/attack.go index 83871dcfc..9d365dd48 100644 --- a/lib/request-processor/attack/attack.go +++ b/lib/request-processor/attack/attack.go @@ -34,12 +34,9 @@ func GetHeadersProto(inst *instance.RequestProcessorInstance) []*protos.Header { /* Construct the AttackDetected protobuf structure to be sent via gRPC to the Agent */ func GetAttackDetectedProto(res utils.InterceptorResult, inst *instance.RequestProcessorInstance) *protos.AttackDetected { - token := inst.GetCurrentToken() - server := inst.GetCurrentServer() - serverPID := context.GetServerPID() return &protos.AttackDetected{ - Token: token, + Token: inst.GetCurrentToken(), ServerPid: serverPID, Request: &protos.Request{ Method: context.GetMethod(inst), @@ -55,7 +52,7 @@ func GetAttackDetectedProto(res utils.InterceptorResult, inst *instance.RequestP Kind: string(res.Kind), Operation: res.Operation, Module: context.GetModule(inst), - Blocked: utils.IsBlockingEnabled(server), + Blocked: utils.IsBlockingEnabled(inst.GetCurrentServer()), Source: res.Source, Path: res.PathToPayload, Stack: context.GetStackTrace(inst), diff --git a/lib/request-processor/context/context_for_unit_tests.go b/lib/request-processor/context/context_for_unit_tests.go index 1d689e125..3a4821d3f 100644 --- a/lib/request-processor/context/context_for_unit_tests.go +++ b/lib/request-processor/context/context_for_unit_tests.go @@ -8,7 +8,6 
@@ import ( "encoding/json" "fmt" . "main/aikido_types" - "main/globals" "main/instance" ) @@ -62,18 +61,17 @@ func LoadForUnitTests(context map[string]string) *instance.RequestProcessorInsta inst: mockInst, Callback: UnitTestsCallback, } - globals.ContextData.Store(tid, ctx) - globals.ContextInstances.Store(tid, nil) + mockInst.SetRequestContext(ctx) + mockInst.SetContextInstance(nil) + mockInst.SetEventContext(&EventContextData{}) TestContext = context return mockInst } func UnloadForUnitTests() { - tid := getThreadID() - globals.ContextData.Delete(tid) - globals.ContextInstances.Delete(tid) - globals.EventContextData.Delete(tid) + // Note: In the new design, contexts are stored per-instance, not globally + // The test instance will be garbage collected when no longer referenced TestServer = nil TestContext = nil } diff --git a/lib/request-processor/context/event_context.go b/lib/request-processor/context/event_context.go index b52358f7f..b4bed462d 100644 --- a/lib/request-processor/context/event_context.go +++ b/lib/request-processor/context/event_context.go @@ -3,7 +3,6 @@ package context // #include "../../API.h" import "C" import ( - "main/globals" "main/instance" "main/utils" ) @@ -21,18 +20,19 @@ func getEventContext(inst *instance.RequestProcessorInstance) *EventContextData if inst == nil { return nil } - tid := inst.GetThreadID() - // Create new event context if it doesn't exist - newCtx := &EventContextData{} - return globals.LoadOrStoreInThreadStorage(tid, newCtx, &globals.EventContextData) + + ctx := inst.GetEventContext() + if ctx == nil { + return nil + } + return ctx.(*EventContextData) } func ResetEventContext(inst *instance.RequestProcessorInstance) bool { if inst == nil { return false } - tid := inst.GetThreadID() - globals.StoreInThreadStorage(tid, &EventContextData{}, &globals.EventContextData) + inst.SetEventContext(&EventContextData{}) return true } diff --git a/lib/request-processor/context/request_context.go b/lib/request-processor/context/request_context.go index 204284b9e..ce1d6c4d2 100644 --- a/lib/request-processor/context/request_context.go +++ b/lib/request-processor/context/request_context.go @@ -58,22 +58,29 @@ func Init(instPtr unsafe.Pointer, callback CallbackFunction) bool { return false } - tid := inst.GetThreadID() - - globals.ContextInstances.Store(tid, instPtr) + inst.SetContextInstance(instPtr) ctx := &RequestContextData{ - inst: inst, // Store instance in context for fast access + inst: inst, Callback: callback, } - globals.ContextData.Store(tid, ctx) + inst.SetRequestContext(ctx) + + // Initialize EventContext upfront + inst.SetEventContext(&EventContextData{}) return true } func GetContext(inst *instance.RequestProcessorInstance) *RequestContextData { - tid := inst.GetThreadID() - return globals.GetFromThreadStorage[*RequestContextData](tid, &globals.ContextData) + if inst == nil { + return nil + } + ctx := inst.GetRequestContext() + if ctx == nil { + return nil + } + return ctx.(*RequestContextData) } func (ctx *RequestContextData) GetInstance() *instance.RequestProcessorInstance { diff --git a/lib/request-processor/globals/globals.go b/lib/request-processor/globals/globals.go index 13eec8b45..9e8fcf761 100644 --- a/lib/request-processor/globals/globals.go +++ b/lib/request-processor/globals/globals.go @@ -20,14 +20,6 @@ var ServersMutex sync.RWMutex // =========================== // Per-Thread Context Storage // =========================== -// Thread-safe per-thread context storage for ZTS (Zend Thread Safety) -// Using pthread ID as key 
ensures each OS thread has isolated context - -var ( - ContextInstances sync.Map // map[uint64]unsafe.Pointer - pthread ID -> instance pointer - ContextData sync.Map // map[uint64]*RequestContextData - pthread ID -> request context - EventContextData sync.Map // map[uint64]*EventContextData - pthread ID -> event context -) // =========================== // Logging State @@ -100,32 +92,3 @@ const ( Version = "1.4.11" SocketPath = "/run/aikido-" + Version + "/aikido-agent.sock" ) - -func GetFromThreadStorage[T any](threadID uint64, storage *sync.Map) T { - if val, ok := storage.Load(threadID); ok { - return val.(T) - } - var zero T - return zero -} - -func StoreInThreadStorage(threadID uint64, data interface{}, storage *sync.Map) { - storage.Store(threadID, data) -} - -func LoadOrStoreInThreadStorage[T any](threadID uint64, newData T, storage *sync.Map) T { - if val, ok := storage.Load(threadID); ok { - return val.(T) - } - storage.Store(threadID, newData) - return newData -} - -func DeleteFromThreadStorage(threadID uint64, storage *sync.Map) { - storage.Delete(threadID) -} - -func HasInThreadStorage(threadID uint64, storage *sync.Map) bool { - _, ok := storage.Load(threadID) - return ok -} diff --git a/lib/request-processor/grpc/client.go b/lib/request-processor/grpc/client.go index c5a833c86..8aa4c7cf8 100644 --- a/lib/request-processor/grpc/client.go +++ b/lib/request-processor/grpc/client.go @@ -17,6 +17,25 @@ import ( "google.golang.org/grpc/credentials/insecure" ) +type RequestShutdownParams struct { + Inst *instance.RequestProcessorInstance + Method string + Route string + RouteParsed string + StatusCode int + User string + UserAgent string + IP string + Url string + RateLimitGroup string + APISpec *protos.APISpec + RateLimited bool + QueryParsed map[string]interface{} + IsWebScanner bool + ShouldDiscoverRoute bool + IsIpBypassed bool +} + var conn *grpc.ClientConn var client protos.AikidoClient @@ -77,21 +96,26 @@ func SendAikidoConfig(server *ServerData) { } /* Send outgoing domain to Aikido Agent via gRPC */ -func OnDomain(threadID uint64, server *ServerData, domain string, port uint32) { +func OnDomain(inst *instance.RequestProcessorInstance, domain string, port uint32) { if client == nil { return } + server := inst.GetCurrentServer() + if server == nil { + return + } + ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) defer cancel() _, err := client.OnDomain(ctx, &protos.Domain{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Domain: domain, Port: port}) if err != nil { - log.WarnfWithThreadID(threadID, "Could not send domain %v: %v", domain, err) + log.Warnf(inst, "Could not send domain %v: %v", domain, err) return } - log.DebugfWithThreadID(threadID, "Domain sent via socket: %v:%v", domain, port) + log.Debugf(inst, "Domain sent via socket: %v:%v", domain, port) } /* Send packages to Aikido Agent via gRPC */ @@ -141,7 +165,7 @@ func OnRequestShutdown(params RequestShutdownParams) { defer cancel() _, err := client.OnRequestShutdown(ctx, &protos.RequestMetadataShutdown{ - Token: params.Token, + Token: params.Inst.GetCurrentToken(), ServerPid: globals.EnvironmentConfig.ServerPID, Method: params.Method, Route: params.Route, @@ -202,21 +226,26 @@ func GetCloudConfigForAllServers(timeout time.Duration) { } } -func OnUserEvent(threadID uint64, server *ServerData, id string, username string, ip string) { +func OnUserEvent(inst *instance.RequestProcessorInstance, id string, username string, ip string) { if client == nil { return } + 
server := inst.GetCurrentServer() + if server == nil { + return + } + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() _, err := client.OnUser(ctx, &protos.User{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Id: id, Username: username, Ip: ip}) if err != nil { - log.WarnfWithThreadID(threadID, "Could not send user event %v %v %v: %v", id, username, ip, err) + log.Warnf(inst, "Could not send user event %v %v %v: %v", id, username, ip, err) return } - log.DebugfWithThreadID(threadID, "User event sent via socket (%v %v %v)", id, username, ip) + log.Debugf(inst, "User event sent via socket (%v %v %v)", id, username, ip) } func OnAttackDetected(inst *instance.RequestProcessorInstance, attackDetected *protos.AttackDetected) { @@ -235,8 +264,13 @@ func OnAttackDetected(inst *instance.RequestProcessorInstance, attackDetected *p log.Debugf(inst, "Attack detected event sent via socket") } -func OnMonitoredSinkStats(threadID uint64, server *ServerData, sink, kind string, attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total int32, timings []int64) { - if client == nil || server == nil { +func OnMonitoredSinkStats(inst *instance.RequestProcessorInstance, sink, kind string, attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total int32, timings []int64) { + if client == nil { + return + } + + server := inst.GetCurrentServer() + if server == nil { return } @@ -256,14 +290,19 @@ func OnMonitoredSinkStats(threadID uint64, server *ServerData, sink, kind string Timings: timings, }) if err != nil { - log.WarnfWithThreadID(threadID, "Could not send monitored sink stats event") + log.Warnf(inst, "Could not send monitored sink stats event") return } - log.DebugfWithThreadID(threadID, "Monitored sink stats for sink \"%s\" sent via socket", sink) + log.Debugf(inst, "Monitored sink stats for sink \"%s\" sent via socket", sink) } -func OnMiddlewareInstalled(threadID uint64, server *ServerData) { - if client == nil || server == nil { +func OnMiddlewareInstalled(inst *instance.RequestProcessorInstance) { + if client == nil { + return + } + + server := inst.GetCurrentServer() + if server == nil { return } @@ -272,17 +311,22 @@ func OnMiddlewareInstalled(threadID uint64, server *ServerData) { _, err := client.OnMiddlewareInstalled(ctx, &protos.MiddlewareInstalledInfo{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID}) if err != nil { - log.WarnfWithThreadID(threadID, "Could not call OnMiddlewareInstalled") + log.Warnf(inst, "Could not call OnMiddlewareInstalled") return } - log.DebugfWithThreadID(threadID, "OnMiddlewareInstalled sent via socket") + log.Debugf(inst, "OnMiddlewareInstalled sent via socket") } -func OnMonitoredIpMatch(threadID uint64, server *ServerData, lists []utils.IpListMatch) { +func OnMonitoredIpMatch(inst *instance.RequestProcessorInstance, lists []utils.IpListMatch) { if client == nil || len(lists) == 0 { return } + server := inst.GetCurrentServer() + if server == nil { + return + } + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() @@ -293,24 +337,29 @@ func OnMonitoredIpMatch(threadID uint64, server *ServerData, lists []utils.IpLis _, err := client.OnMonitoredIpMatch(ctx, &protos.MonitoredIpMatch{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Lists: protosLists}) if err != nil { - log.WarnfWithThreadID(threadID, "Could not call OnMonitoredIpMatch") + log.Warnf(inst, "Could 
not call OnMonitoredIpMatch") return } - log.DebugfWithThreadID(threadID, "OnMonitoredIpMatch sent via socket") + log.Debugf(inst, "OnMonitoredIpMatch sent via socket") } -func OnMonitoredUserAgentMatch(threadID uint64, server *ServerData, lists []string) { +func OnMonitoredUserAgentMatch(inst *instance.RequestProcessorInstance, lists []string) { if client == nil || len(lists) == 0 { return } + server := inst.GetCurrentServer() + if server == nil { + return + } + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) defer cancel() _, err := client.OnMonitoredUserAgentMatch(ctx, &protos.MonitoredUserAgentMatch{Token: server.AikidoConfig.Token, ServerPid: globals.EnvironmentConfig.ServerPID, Lists: lists}) if err != nil { - log.WarnfWithThreadID(threadID, "Could not call OnMonitoredUserAgentMatch") + log.Warnf(inst, "Could not call OnMonitoredUserAgentMatch") return } - log.DebugfWithThreadID(threadID, "OnMonitoredUserAgentMatch sent via socket") + log.Debugf(inst, "OnMonitoredUserAgentMatch sent via socket") } diff --git a/lib/request-processor/handle_blocking_request.go b/lib/request-processor/handle_blocking_request.go index 9b170ba47..3daeeea98 100644 --- a/lib/request-processor/handle_blocking_request.go +++ b/lib/request-processor/handle_blocking_request.go @@ -37,8 +37,7 @@ func OnGetBlockingStatus(inst *instance.RequestProcessorInstance) string { return "" } if !server.MiddlewareInstalled { - threadID := inst.GetThreadID() - go grpc.OnMiddlewareInstalled(threadID, server) + go grpc.OnMiddlewareInstalled(inst) server.MiddlewareInstalled = true } @@ -107,27 +106,25 @@ func OnGetAutoBlockingStatus(inst *instance.RequestProcessorInstance) string { return GetAction("exit", "blocked", "ip", "not in allow lists", ip, 403) } - threadID := inst.GetThreadID() - if ipMonitored, ipMonitoredMatches := utils.IsIpMonitored(inst, server, ip); ipMonitored { log.Infof(inst, "IP \"%s\" found in monitored lists: %v!", ip, ipMonitoredMatches) - go grpc.OnMonitoredIpMatch(threadID, server, ipMonitoredMatches) + go grpc.OnMonitoredIpMatch(inst, ipMonitoredMatches) } if ipBlocked, ipBlockedMatches := utils.IsIpBlocked(inst, server, ip); ipBlocked { log.Infof(inst, "IP \"%s\" found in blocked lists: %v!", ip, ipBlockedMatches) - go grpc.OnMonitoredIpMatch(threadID, server, ipBlockedMatches) + go grpc.OnMonitoredIpMatch(inst, ipBlockedMatches) return GetAction("exit", "blocked", "ip", ipBlockedMatches[0].Description, ip, 403) } if userAgentMonitored, userAgentMonitoredDescriptions := utils.IsUserAgentMonitored(server, userAgent); userAgentMonitored { log.Infof(inst, "User Agent \"%s\" found in monitored lists: %v!", userAgent, userAgentMonitoredDescriptions) - go grpc.OnMonitoredUserAgentMatch(threadID, server, userAgentMonitoredDescriptions) + go grpc.OnMonitoredUserAgentMatch(inst, userAgentMonitoredDescriptions) } if userAgentBlocked, userAgentBlockedDescriptions := utils.IsUserAgentBlocked(server, userAgent); userAgentBlocked { log.Infof(inst, "User Agent \"%s\" found in blocked lists: %v!", userAgent, userAgentBlockedDescriptions) - go grpc.OnMonitoredUserAgentMatch(threadID, server, userAgentBlockedDescriptions) + go grpc.OnMonitoredUserAgentMatch(inst, userAgentBlockedDescriptions) description := "unknown" if len(userAgentBlockedDescriptions) > 0 { diff --git a/lib/request-processor/handle_request_metadata.go b/lib/request-processor/handle_request_metadata.go index 00936b59b..b1578ec5e 100644 --- a/lib/request-processor/handle_request_metadata.go +++ 
b/lib/request-processor/handle_request_metadata.go @@ -1,7 +1,6 @@ package main import ( - . "main/aikido_types" "main/api_discovery" "main/context" "main/grpc" @@ -16,12 +15,12 @@ func OnPreRequest(inst *instance.RequestProcessorInstance) string { return "" } -func OnRequestShutdownReporting(params RequestShutdownParams) { +func OnRequestShutdownReporting(params grpc.RequestShutdownParams) { if params.Method == "" || params.Route == "" || params.StatusCode == 0 { return } - log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) + log.Info(params.Inst, "[RSHUTDOWN] Got request metadata: ", params.Method, " ", params.Route, " ", params.StatusCode) // Only detect web scanner activity for non-bypassed IPs if !params.IsIpBypassed { params.IsWebScanner = webscanner.IsWebScanner(params.Method, params.Route, params.QueryParsed) @@ -32,7 +31,7 @@ func OnRequestShutdownReporting(params RequestShutdownParams) { return } - log.InfoWithThreadID(params.ThreadID, "[RSHUTDOWN] Got API spec: ", params.APISpec) + log.Info(params.Inst, "[RSHUTDOWN] Got API spec: ", params.APISpec) grpc.OnRequestShutdown(params) } @@ -43,9 +42,8 @@ func OnPostRequest(inst *instance.RequestProcessorInstance) string { } if !context.IsIpBypassed(inst) { - params := RequestShutdownParams{ - ThreadID: inst.GetThreadID(), - Token: inst.GetCurrentToken(), + params := grpc.RequestShutdownParams{ + Inst: inst, Method: context.GetMethod(inst), Route: context.GetRoute(inst), RouteParsed: context.GetParsedRoute(inst), diff --git a/lib/request-processor/handle_urls.go b/lib/request-processor/handle_urls.go index a11bef975..3006f95c4 100644 --- a/lib/request-processor/handle_urls.go +++ b/lib/request-processor/handle_urls.go @@ -28,12 +28,8 @@ func OnPreOutgoingRequest(inst *instance.RequestProcessorInstance) string { // Check if the domain is blocked based on cloud configuration if !context.IsIpBypassed(inst) && ssrf.IsBlockedOutboundDomainWithInst(inst, hostname) { - server := inst.GetCurrentServer() // Blocked domains should also be reported to the agent. - if server != nil { - threadID := inst.GetThreadID() - go grpc.OnDomain(threadID, server, hostname, port) - } + go grpc.OnDomain(inst, hostname, port) message := fmt.Sprintf("Aikido firewall has blocked an outbound connection: %s(...) 
to %s", operation, html.EscapeString(hostname)) return attack.GetThrowAction(message, 500) } @@ -81,13 +77,9 @@ func OnPostOutgoingRequest(inst *instance.RequestProcessorInstance) string { log.Info(inst, "[AFTER] Got domain: ", hostname, " port: ", port) - server := inst.GetCurrentServer() - if server != nil { - threadID := inst.GetThreadID() - go grpc.OnDomain(threadID, server, hostname, port) - if effectiveHostname != hostname { - go grpc.OnDomain(threadID, server, effectiveHostname, effectivePort) - } + go grpc.OnDomain(inst, hostname, port) + if effectiveHostname != hostname { + go grpc.OnDomain(inst, effectiveHostname, effectivePort) } if context.IsEndpointProtectionTurnedOff(inst) { diff --git a/lib/request-processor/handle_user_event.go b/lib/request-processor/handle_user_event.go index e68f28c11..e371ec662 100644 --- a/lib/request-processor/handle_user_event.go +++ b/lib/request-processor/handle_user_event.go @@ -18,11 +18,6 @@ func OnUserEvent(inst *instance.RequestProcessorInstance) string { return "" } - server := inst.GetCurrentServer() - if server == nil { - return "" - } - threadID := inst.GetThreadID() // Capture threadID before goroutine - go grpc.OnUserEvent(threadID, server, id, username, ip) + go grpc.OnUserEvent(inst, id, username, ip) return "" } diff --git a/lib/request-processor/instance/wrapper.go b/lib/request-processor/instance/wrapper.go index 53931c78b..1cc88e2cc 100644 --- a/lib/request-processor/instance/wrapper.go +++ b/lib/request-processor/instance/wrapper.go @@ -16,6 +16,9 @@ type RequestProcessorInstance struct { ContextInstance unsafe.Pointer // For context callbacks ContextCallback unsafe.Pointer // C function pointer, must be per-instance in ZTS + RequestContext interface{} + EventContext interface{} + mu sync.Mutex // Only used when isZTS is true isZTS bool } @@ -105,3 +108,51 @@ func (i *RequestProcessorInstance) GetThreadID() uint64 { } return i.threadID } + +func (i *RequestProcessorInstance) SetRequestContext(ctx interface{}) { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + i.RequestContext = ctx +} + +func (i *RequestProcessorInstance) GetRequestContext() interface{} { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + return i.RequestContext +} + +func (i *RequestProcessorInstance) SetEventContext(ctx interface{}) { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + i.EventContext = ctx +} + +func (i *RequestProcessorInstance) GetEventContext() interface{} { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + return i.EventContext +} + +func (i *RequestProcessorInstance) SetContextInstance(ptr unsafe.Pointer) { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + i.ContextInstance = ptr +} + +func (i *RequestProcessorInstance) GetContextInstance() unsafe.Pointer { + if i.isZTS { + i.mu.Lock() + defer i.mu.Unlock() + } + return i.ContextInstance +} diff --git a/lib/request-processor/log/log.go b/lib/request-processor/log/log.go index a2110538b..b271991a3 100644 --- a/lib/request-processor/log/log.go +++ b/lib/request-processor/log/log.go @@ -127,67 +127,6 @@ func Errorf(inst *instance.RequestProcessorInstance, format string, args ...inte logMessagef(inst, globals.LogErrorLevel, format, args...) 
} -// Direct threadID logging (for goroutines where inst cannot be safely passed) -func logMessageWithThreadID(threadID uint64, level globals.LogLevel, args ...interface{}) { - globals.LogMutex.RLock() - lvl := globals.CurrentLogLevel - globals.LogMutex.RUnlock() - - if level >= lvl { - initLogFile() - formatter := &AikidoFormatter{} - message := fmt.Sprint(args...) - formattedMessage := formatter.Format(level, threadID, message) - globals.Logger.Print(formattedMessage) - } -} - -func logMessagefWithThreadID(threadID uint64, level globals.LogLevel, format string, args ...interface{}) { - globals.LogMutex.RLock() - lvl := globals.CurrentLogLevel - globals.LogMutex.RUnlock() - - if level >= lvl { - initLogFile() - formatter := &AikidoFormatter{} - message := fmt.Sprintf(format, args...) - formattedMessage := formatter.Format(level, threadID, message) - globals.Logger.Print(formattedMessage) - } -} - -func DebugWithThreadID(threadID uint64, args ...interface{}) { - logMessageWithThreadID(threadID, globals.LogDebugLevel, args...) -} - -func InfoWithThreadID(threadID uint64, args ...interface{}) { - logMessageWithThreadID(threadID, globals.LogInfoLevel, args...) -} - -func WarnWithThreadID(threadID uint64, args ...interface{}) { - logMessageWithThreadID(threadID, globals.LogWarnLevel, args...) -} - -func ErrorWithThreadID(threadID uint64, args ...interface{}) { - logMessageWithThreadID(threadID, globals.LogErrorLevel, args...) -} - -func DebugfWithThreadID(threadID uint64, format string, args ...interface{}) { - logMessagefWithThreadID(threadID, globals.LogDebugLevel, format, args...) -} - -func InfofWithThreadID(threadID uint64, format string, args ...interface{}) { - logMessagefWithThreadID(threadID, globals.LogInfoLevel, format, args...) -} - -func WarnfWithThreadID(threadID uint64, format string, args ...interface{}) { - logMessagefWithThreadID(threadID, globals.LogWarnLevel, format, args...) -} - -func ErrorfWithThreadID(threadID uint64, format string, args ...interface{}) { - logMessagefWithThreadID(threadID, globals.LogErrorLevel, format, args...) 
-} - // SetLogLevel changes the current log level (thread-safe) func SetLogLevel(level string) error { var newLevel globals.LogLevel diff --git a/lib/request-processor/main.go b/lib/request-processor/main.go index 254a96994..d6e674829 100644 --- a/lib/request-processor/main.go +++ b/lib/request-processor/main.go @@ -232,8 +232,7 @@ func RequestProcessorReportStats(instancePtr unsafe.Pointer, sink, kind string, clonedTimings := make([]int64, len(timings)) copy(clonedTimings, timings) - threadID := inst.GetThreadID() - go grpc.OnMonitoredSinkStats(threadID, inst.GetCurrentServer(), strings.Clone(sink), strings.Clone(kind), attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total, clonedTimings) + go grpc.OnMonitoredSinkStats(inst, strings.Clone(sink), strings.Clone(kind), attacksDetected, attacksBlocked, interceptorThrewError, withoutContext, total, clonedTimings) } //export RequestProcessorUninit From ac3c9e5ae9516fc101a2de4b24680719ac1564a5 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 11:24:52 +0000 Subject: [PATCH 157/170] Revert ticker initialization logic --- lib/agent/aikido_types/init_data.go | 13 ++++----- .../attackWaveDetector.go | 3 -- lib/agent/cloud/cloud.go | 24 ---------------- lib/agent/grpc/server.go | 28 ------------------- lib/agent/rate_limiting/rate_limiting.go | 4 --- 5 files changed, 5 insertions(+), 67 deletions(-) diff --git a/lib/agent/aikido_types/init_data.go b/lib/agent/aikido_types/init_data.go index 8428decc0..cb302dfa4 100644 --- a/lib/agent/aikido_types/init_data.go +++ b/lib/agent/aikido_types/init_data.go @@ -1,6 +1,7 @@ package aikido_types import ( + "main/constants" "main/log" "sync" "time" @@ -129,13 +130,13 @@ type ServerDataPolling struct { func NewServerDataPolling() *ServerDataPolling { return &ServerDataPolling{ HeartbeatRoutineChannel: make(chan struct{}), - HeartbeatTicker: nil, // Will be created on first request + HeartbeatTicker: time.NewTicker(10 * time.Minute), ConfigPollingRoutineChannel: make(chan struct{}), - ConfigPollingTicker: time.NewTicker(1 * time.Minute), // Start immediately for config updates + ConfigPollingTicker: time.NewTicker(1 * time.Minute), RateLimitingChannel: make(chan struct{}), - RateLimitingTicker: nil, // Will be created on first request + RateLimitingTicker: time.NewTicker(constants.MinRateLimitingIntervalInMs * time.Millisecond), AttackWaveChannel: make(chan struct{}), - AttackWaveTicker: nil, // Will be created on first request + AttackWaveTicker: time.NewTicker(1 * time.Minute), } } @@ -218,10 +219,6 @@ type ServerData struct { // In multi-worker mode (e.g., frankenphp-worker), only one worker should send it SentStartedEvent uint32 - // Ensures tickers start exactly once on first request - // Using sync.Once is safe to call from any context (including gRPC handlers) - StartTickersOnce sync.Once - // Last time this server established a gRPC connection LastConnectionTime int64 diff --git a/lib/agent/attack-wave-detection/attackWaveDetector.go b/lib/agent/attack-wave-detection/attackWaveDetector.go index 212e40e69..f9d9be27a 100644 --- a/lib/agent/attack-wave-detection/attackWaveDetector.go +++ b/lib/agent/attack-wave-detection/attackWaveDetector.go @@ -3,7 +3,6 @@ package attack_wave_detection import ( . 
"main/aikido_types" "main/utils" - "time" ) func AdvanceAttackWaveQueues(server *ServerData) { @@ -21,9 +20,7 @@ func AdvanceAttackWaveQueues(server *ServerData) { } // StartAttackWaveTicker starts the attack wave detection ticker -// Called on first request via sync.Once func StartAttackWaveTicker(server *ServerData) { - server.PollingData.AttackWaveTicker = time.NewTicker(1 * time.Minute) utils.StartPollingRoutine(server.PollingData.AttackWaveChannel, server.PollingData.AttackWaveTicker, AdvanceAttackWaveQueues, server) } diff --git a/lib/agent/cloud/cloud.go b/lib/agent/cloud/cloud.go index 8454e109e..d304e4725 100644 --- a/lib/agent/cloud/cloud.go +++ b/lib/agent/cloud/cloud.go @@ -2,9 +2,7 @@ package cloud import ( . "main/aikido_types" - "main/constants" "main/utils" - "time" ) func Init(server *ServerData) { @@ -13,29 +11,7 @@ func Init(server *ServerData) { CheckConfigUpdatedAt(server) - // Start config polling immediately (for cloud config updates) - // Heartbeat and other tickers will start on first request via StartAllTickers() utils.StartPollingRoutine(server.PollingData.ConfigPollingRoutineChannel, server.PollingData.ConfigPollingTicker, CheckConfigUpdatedAt, server) -} - -// StartAllTickers starts all tickers on first request -// Called via sync.Once to ensure exactly-once execution, safe from any context -func StartAllTickers(server *ServerData) { - // Determine initial heartbeat interval based on cloud config - // Default to 10 minutes (conservative) if config was never fetched - heartbeatInterval := 10 * time.Minute - - // Only use faster 1-minute interval if we successfully fetched config - // and cloud indicates this is a new server (ReceivedAnyStats = false) - if server.CloudConfig.ConfigUpdatedAt > 0 { - if !server.CloudConfig.ReceivedAnyStats { - heartbeatInterval = 1 * time.Minute - } else if server.CloudConfig.HeartbeatIntervalInMS >= constants.MinHeartbeatIntervalInMS { - heartbeatInterval = time.Duration(server.CloudConfig.HeartbeatIntervalInMS) * time.Millisecond - } - } - - server.PollingData.HeartbeatTicker = time.NewTicker(heartbeatInterval) utils.StartPollingRoutine(server.PollingData.HeartbeatRoutineChannel, server.PollingData.HeartbeatTicker, SendHeartbeatEvent, server) } diff --git a/lib/agent/grpc/server.go b/lib/agent/grpc/server.go index 9568fcf93..e3a848fb8 100644 --- a/lib/agent/grpc/server.go +++ b/lib/agent/grpc/server.go @@ -3,13 +3,11 @@ package grpc import ( "context" "fmt" - attack_wave_detection "main/attack-wave-detection" "main/cloud" "main/constants" "main/globals" "main/ipc/protos" "main/log" - rate_limiting "main/rate_limiting" "main/server_utils" "main/utils" "net" @@ -73,8 +71,6 @@ func (s *GrpcServer) OnDomain(ctx context.Context, req *protos.Domain) (*emptypb return &emptypb.Empty{}, nil } - startTickersOnce(server, "domain event") - log.Debugf(server.Logger, "Received domain: %s:%d", req.GetDomain(), req.GetPort()) storeDomain(server, req.GetDomain(), req.GetPort()) return &emptypb.Empty{}, nil @@ -86,30 +82,16 @@ func (s *GrpcServer) GetRateLimitingStatus(ctx context.Context, req *protos.Rate return &protos.RateLimitingStatus{Block: false}, nil } - startTickersOnce(server, "rate limiting") - log.Debugf(server.Logger, "Received rate limiting info: %s %s %s %s %s %s", req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()) return getRateLimitingStatus(server, req.GetMethod(), req.GetRoute(), req.GetRouteParsed(), req.GetUser(), req.GetIp(), req.GetRateLimitGroup()), nil } -// 
startTickersOnce is a helper function to start tickers exactly once -func startTickersOnce(server *ServerData, source string) { - server.StartTickersOnce.Do(func() { - log.Debugf(server.Logger, "Starting all tickers for server \"AIK_RUNTIME_***%s\" (via %s)", utils.AnonymizeToken(server.AikidoConfig.Token), source) - cloud.StartAllTickers(server) - rate_limiting.StartRateLimitingTicker(server) - attack_wave_detection.StartAttackWaveTicker(server) - }) -} - func (s *GrpcServer) OnRequestShutdown(ctx context.Context, req *protos.RequestMetadataShutdown) (*emptypb.Empty, error) { server := globals.GetServer(ServerKey{Token: req.GetToken(), ServerPID: req.GetServerPid()}) if server == nil { return &emptypb.Empty{}, nil } - startTickersOnce(server, "request shutdown") - log.Debugf(server.Logger, "Received request metadata: %s %s %d %s %s %v", req.GetMethod(), req.GetRouteParsed(), req.GetStatusCode(), req.GetUser(), req.GetIp(), req.GetApiSpec()) if req.GetShouldDiscoverRoute() || req.GetRateLimited() { go storeTotalStats(server, req.GetRateLimited()) @@ -142,8 +124,6 @@ func (s *GrpcServer) OnUser(ctx context.Context, req *protos.User) (*emptypb.Emp return &emptypb.Empty{}, nil } - startTickersOnce(server, "user event") - log.Debugf(server.Logger, "Received user event: %s", req.GetId()) go onUserEvent(server, req.GetId(), req.GetUsername(), req.GetIp()) return &emptypb.Empty{}, nil @@ -155,8 +135,6 @@ func (s *GrpcServer) OnAttackDetected(ctx context.Context, req *protos.AttackDet return &emptypb.Empty{}, nil } - startTickersOnce(server, "attack detection") - cloud.SendAttackDetectedEvent(server, req, "detected_attack") storeAttackStats(server, req) return &emptypb.Empty{}, nil @@ -168,8 +146,6 @@ func (s *GrpcServer) OnMonitoredSinkStats(ctx context.Context, req *protos.Monit return &emptypb.Empty{}, nil } - startTickersOnce(server, "sink stats") - storeSinkStats(server, req) return &emptypb.Empty{}, nil } @@ -192,8 +168,6 @@ func (s *GrpcServer) OnMonitoredIpMatch(ctx context.Context, req *protos.Monitor return &emptypb.Empty{}, nil } - startTickersOnce(server, "monitored IP match") - log.Debugf(server.Logger, "Received MonitoredIpMatch: %v", req.GetLists()) server.StatsData.StatsMutex.Lock() @@ -209,8 +183,6 @@ func (s *GrpcServer) OnMonitoredUserAgentMatch(ctx context.Context, req *protos. return &emptypb.Empty{}, nil } - startTickersOnce(server, "monitored user agent match") - log.Debugf(server.Logger, "Received MonitoredUserAgentMatch: %v", req.GetLists()) server.StatsData.StatsMutex.Lock() diff --git a/lib/agent/rate_limiting/rate_limiting.go b/lib/agent/rate_limiting/rate_limiting.go index 35bdd2496..66ad28236 100644 --- a/lib/agent/rate_limiting/rate_limiting.go +++ b/lib/agent/rate_limiting/rate_limiting.go @@ -2,9 +2,7 @@ package rate_limiting import ( . 
"main/aikido_types" - "main/constants" "main/utils" - "time" ) func AdvanceRateLimitingQueues(server *ServerData) { @@ -25,9 +23,7 @@ func AdvanceRateLimitingQueues(server *ServerData) { } // StartRateLimitingTicker starts the rate limiting ticker -// Called on first request via sync.Once func StartRateLimitingTicker(server *ServerData) { - server.PollingData.RateLimitingTicker = time.NewTicker(constants.MinRateLimitingIntervalInMs * time.Millisecond) utils.StartPollingRoutine(server.PollingData.RateLimitingChannel, server.PollingData.RateLimitingTicker, AdvanceRateLimitingQueues, server) } From 96e949f882d99747ec2f4c64c2a7b07055fe0db9 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 11:29:12 +0000 Subject: [PATCH 158/170] + --- lib/agent/server_utils/server.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/agent/server_utils/server.go b/lib/agent/server_utils/server.go index 90dec1c17..aecb1a111 100644 --- a/lib/agent/server_utils/server.go +++ b/lib/agent/server_utils/server.go @@ -39,6 +39,9 @@ func CompleteServerConfiguration(server *ServerData, serverKey ServerKey, req *p log.InfofMainAndServer(server.Logger, "Server \"AIK_RUNTIME_***%s\" (server PID: %d) registered successfully!", utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) cloud.Init(server) + rate_limiting.StartRateLimitingTicker(server) + attack_wave_detection.StartAttackWaveTicker(server) + if globals.IsPastDeletedServer(serverKey) { log.InfofMainAndServer(server.Logger, "Server \"AIK_RUNTIME_***%s\" (server PID: %d) was registered before for this server PID, but deleted due to inactivity! Skipping start event as it was sent before...", utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) } else { From 6c16cd1e191714bbd6cd763e8b95f3be0a4a77f8 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 11:35:57 +0000 Subject: [PATCH 159/170] Add go clean command to build workflow for agent and request-processor --- .github/workflows/build.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b21cd7462..5f7382afe 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -39,6 +39,7 @@ jobs: cd lib protoc --go_out=agent --go-grpc_out=agent ipc.proto cd agent + go clean -cache -testcache go get main/ipc/protos go get google.golang.org/grpc go get github.com/stretchr/testify/assert @@ -52,6 +53,7 @@ jobs: cd lib protoc --go_out=request-processor --go-grpc_out=request-processor ipc.proto cd request-processor + go clean -cache -testcache go mod tidy go get google.golang.org/grpc go get github.com/stretchr/testify/assert From 487796812f40d40b0cee22dd934c80f0f7b1bb98 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 11:42:58 +0000 Subject: [PATCH 160/170] Update build workflow to include -modcache option in go clean for agent and request-processor --- .github/workflows/build.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5f7382afe..35093e9a1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -39,7 +39,7 @@ jobs: cd lib protoc --go_out=agent --go-grpc_out=agent ipc.proto cd agent - go clean -cache -testcache + go clean -cache -testcache -modcache go get main/ipc/protos go get google.golang.org/grpc go get github.com/stretchr/testify/assert @@ -53,7 +53,7 @@ jobs: cd lib protoc --go_out=request-processor --go-grpc_out=request-processor ipc.proto cd request-processor 
- go clean -cache -testcache + go clean -cache -testcache -modcache go mod tidy go get google.golang.org/grpc go get github.com/stretchr/testify/assert From 6a79fd08397ddc976e82f10ce5c9ca4da7b21465 Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 11:49:22 +0000 Subject: [PATCH 161/170] Add missing test to branch --- .../ssrf/checkPostRequestSSRF_test.go | 140 ++++++++++++++++++ 1 file changed, 140 insertions(+) create mode 100644 lib/request-processor/vulnerabilities/ssrf/checkPostRequestSSRF_test.go diff --git a/lib/request-processor/vulnerabilities/ssrf/checkPostRequestSSRF_test.go b/lib/request-processor/vulnerabilities/ssrf/checkPostRequestSSRF_test.go new file mode 100644 index 000000000..d3d2e3b1d --- /dev/null +++ b/lib/request-processor/vulnerabilities/ssrf/checkPostRequestSSRF_test.go @@ -0,0 +1,140 @@ +package ssrf + +import ( + "main/context" + "main/instance" + "main/utils" + "testing" +) + +func TestCheckResolvedIpForSSRF_NoStoredInterceptorResult_ReturnsNil(t *testing.T) { + inst := instance.NewRequestProcessorInstance(0, false) + context.ResetEventContext(inst) + t.Cleanup(func() { context.ResetEventContext(inst) }) + + if res := CheckResolvedIpForSSRF(inst, "127.0.0.1"); res != nil { + t.Fatalf("expected nil, got %#v", res) + } +} + +func TestCheckResolvedIpForSSRF_PublicIp_ReturnsNil(t *testing.T) { + inst := instance.NewRequestProcessorInstance(0, false) + context.ResetEventContext(inst) + t.Cleanup(func() { context.ResetEventContext(inst) }) + + ir := &utils.InterceptorResult{ + Operation: "curl_exec", + Kind: utils.Ssrf, + Source: "body", + Metadata: map[string]string{}, + Payload: "http://example.test", + } + context.EventContextSetCurrentSsrfInterceptorResult(inst, ir) + + if res := CheckResolvedIpForSSRF(inst, "8.8.8.8"); res != nil { + t.Fatalf("expected nil, got %#v", res) + } + if _, ok := ir.Metadata["isPrivateIp"]; ok { + t.Fatalf("expected isPrivateIp to not be set for public IP") + } + if _, ok := ir.Metadata["resolvedIp"]; ok { + t.Fatalf("expected resolvedIp to not be set for public IP") + } +} + +func TestCheckResolvedIpForSSRF_PrivateIp_ReturnsInterceptorResultWithMetadata(t *testing.T) { + inst := instance.NewRequestProcessorInstance(0, false) + context.ResetEventContext(inst) + t.Cleanup(func() { context.ResetEventContext(inst) }) + + ir := &utils.InterceptorResult{ + Operation: "curl_exec", + Kind: utils.Ssrf, + Source: "body", + Metadata: map[string]string{}, + Payload: "http://example.test", + } + context.EventContextSetCurrentSsrfInterceptorResult(inst, ir) + + res := CheckResolvedIpForSSRF(inst, "127.0.0.1") + if res == nil { + t.Fatalf("expected non-nil interceptor result") + } + if res != ir { + t.Fatalf("expected returned interceptor result to be the stored one") + } + if got := res.Metadata["resolvedIp"]; got != "127.0.0.1" { + t.Fatalf("expected resolvedIp=127.0.0.1, got %q", got) + } + if got := res.Metadata["isPrivateIp"]; got != "true" { + t.Fatalf("expected isPrivateIp=true, got %q", got) + } +} + +func TestCheckEffectiveHostnameForSSRF_PrivateIpHostname_ReturnsInterceptorResultWithMetadata(t *testing.T) { + inst := instance.NewRequestProcessorInstance(0, false) + context.ResetEventContext(inst) + t.Cleanup(func() { context.ResetEventContext(inst) }) + + ir := &utils.InterceptorResult{ + Operation: "curl_exec", + Kind: utils.Ssrf, + Source: "body", + Metadata: map[string]string{}, + Payload: "http://example.test", + } + context.EventContextSetCurrentSsrfInterceptorResult(inst, ir) + + res := 
CheckEffectiveHostnameForSSRF(inst, "127.0.0.1") + if res == nil { + t.Fatalf("expected non-nil interceptor result") + } + if res != ir { + t.Fatalf("expected returned interceptor result to be the stored one") + } + if got := res.Metadata["effectiveHostname"]; got != "127.0.0.1" { + t.Fatalf("expected effectiveHostname=127.0.0.1, got %q", got) + } + if got := res.Metadata["resolvedIp"]; got != "127.0.0.1" { + t.Fatalf("expected resolvedIp=127.0.0.1, got %q", got) + } + if got := res.Metadata["isPrivateIp"]; got != "true" { + t.Fatalf("expected isPrivateIp=true, got %q", got) + } +} + +func TestCheckEffectiveHostnameForSSRF_IMDSHostname_ReturnsInterceptorResultWithIMDSMetadata(t *testing.T) { + inst := instance.NewRequestProcessorInstance(0, false) + context.ResetEventContext(inst) + t.Cleanup(func() { context.ResetEventContext(inst) }) + + ir := &utils.InterceptorResult{ + Operation: "curl_exec", + Kind: utils.Ssrf, + Source: "body", + Metadata: map[string]string{}, + Payload: "http://example.test", + } + context.EventContextSetCurrentSsrfInterceptorResult(inst, ir) + + res := CheckEffectiveHostnameForSSRF(inst, "169.254.169.254") + if res == nil { + t.Fatalf("expected non-nil interceptor result") + } + if res != ir { + t.Fatalf("expected returned interceptor result to be the stored one") + } + if got := res.Metadata["effectiveHostname"]; got != "169.254.169.254" { + t.Fatalf("expected effectiveHostname=169.254.169.254, got %q", got) + } + if got := res.Metadata["resolvedIp"]; got != "169.254.169.254" { + t.Fatalf("expected resolvedIp=169.254.169.254, got %q", got) + } + if got := res.Metadata["isIMDSIp"]; got != "true" { + t.Fatalf("expected isIMDSIp=true, got %q", got) + } + // IMDS IPv4 is also in private ranges in our implementation. + if got := res.Metadata["isPrivateIp"]; got != "true" { + t.Fatalf("expected isPrivateIp=true, got %q", got) + } +} From c77bc3dd236b23f8ffd6104285ce014058f0d9da Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 12:01:45 +0000 Subject: [PATCH 162/170] Rollback HandleBypassedIp logic --- lib/agent/server_utils/server.go | 2 +- lib/php-extension/Aikido.cpp | 1 - lib/php-extension/HandleBypassedIp.cpp | 15 +-------------- lib/php-extension/PhpLifecycle.cpp | 3 +-- lib/php-extension/include/php_aikido.h | 1 - 5 files changed, 3 insertions(+), 19 deletions(-) diff --git a/lib/agent/server_utils/server.go b/lib/agent/server_utils/server.go index aecb1a111..9916a4f30 100644 --- a/lib/agent/server_utils/server.go +++ b/lib/agent/server_utils/server.go @@ -41,7 +41,7 @@ func CompleteServerConfiguration(server *ServerData, serverKey ServerKey, req *p cloud.Init(server) rate_limiting.StartRateLimitingTicker(server) attack_wave_detection.StartAttackWaveTicker(server) - + if globals.IsPastDeletedServer(serverKey) { log.InfofMainAndServer(server.Logger, "Server \"AIK_RUNTIME_***%s\" (server PID: %d) was registered before for this server PID, but deleted due to inactivity! 
Skipping start event as it was sent before...", utils.AnonymizeToken(serverKey.Token), serverKey.ServerPID) } else { diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp index c3b6c7ac3..d0ceb06c2 100644 --- a/lib/php-extension/Aikido.cpp +++ b/lib/php-extension/Aikido.cpp @@ -120,7 +120,6 @@ PHP_GINIT_FUNCTION(aikido) { aikido_globals->laravelEnvLoaded = false; aikido_globals->checkedAutoBlock = false; aikido_globals->checkedShouldBlockRequest = false; - aikido_globals->checkedIpBypass = false; aikido_globals->isIpBypassed = false; aikido_globals->global_ast_to_clean = nullptr; aikido_globals->original_ast_process = nullptr; diff --git a/lib/php-extension/HandleBypassedIp.cpp b/lib/php-extension/HandleBypassedIp.cpp index b4e26c155..cf0932634 100644 --- a/lib/php-extension/HandleBypassedIp.cpp +++ b/lib/php-extension/HandleBypassedIp.cpp @@ -4,10 +4,6 @@ // If true, all blocking checks will be skipped. // Accessed via AIKIDO_GLOBAL(isIpBypassed). -// The checkedIpBypass module global variable is used to check if IP bypass check -// has already been called, in order to avoid multiple calls to this function. -// Accessed via AIKIDO_GLOBAL(checkedIpBypass). - void InitIpBypassCheck() { ScopedTimer scopedTimer("check_ip_bypass", "aikido_op"); @@ -21,15 +17,6 @@ void InitIpBypassCheck() { } bool IsAikidoDisabledOrBypassed() { - if (AIKIDO_GLOBAL(disable) == true) { - return true; - } - - if (!AIKIDO_GLOBAL(checkedIpBypass)) { - AIKIDO_GLOBAL(checkedIpBypass) = true; - InitIpBypassCheck(); - } - - return AIKIDO_GLOBAL(isIpBypassed); + return AIKIDO_GLOBAL(disable) == true || AIKIDO_GLOBAL(isIpBypassed); } diff --git a/lib/php-extension/PhpLifecycle.cpp b/lib/php-extension/PhpLifecycle.cpp index 5d0013117..e6629ed49 100644 --- a/lib/php-extension/PhpLifecycle.cpp +++ b/lib/php-extension/PhpLifecycle.cpp @@ -17,8 +17,7 @@ void PhpLifecycle::RequestInit() { AIKIDO_GLOBAL(requestProcessor).RequestInit(); AIKIDO_GLOBAL(checkedAutoBlock) = false; AIKIDO_GLOBAL(checkedShouldBlockRequest) = false; - AIKIDO_GLOBAL(checkedIpBypass) = false; - AIKIDO_GLOBAL(isIpBypassed) = false; + InitIpBypassCheck(); } void PhpLifecycle::RequestShutdown() { diff --git a/lib/php-extension/include/php_aikido.h b/lib/php-extension/include/php_aikido.h index bb19eac26..ceced7b75 100644 --- a/lib/php-extension/include/php_aikido.h +++ b/lib/php-extension/include/php_aikido.h @@ -41,7 +41,6 @@ uint64_t totalOverheadForCurrentRequest; bool laravelEnvLoaded; bool checkedAutoBlock; bool checkedShouldBlockRequest; -bool checkedIpBypass; bool isIpBypassed; HashTable *global_ast_to_clean; void (*original_ast_process)(zend_ast *ast); From a42436ec4f5b0a2d5fb5b24bcc1e40a4910f0efb Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 12:09:40 +0000 Subject: [PATCH 163/170] More refactoring --- lib/php-extension/PhpWrappers.cpp | 8 +++++--- lib/php-extension/RequestProcessor.cpp | 16 +++------------- lib/php-extension/include/RequestProcessor.h | 2 +- 3 files changed, 9 insertions(+), 17 deletions(-) diff --git a/lib/php-extension/PhpWrappers.cpp b/lib/php-extension/PhpWrappers.cpp index 6572a598c..446fff300 100644 --- a/lib/php-extension/PhpWrappers.cpp +++ b/lib/php-extension/PhpWrappers.cpp @@ -38,13 +38,15 @@ bool CallPhpFunction(std::string function_name, unsigned int params_number, zval zval_dtor(&_function_name); - if (_result != SUCCESS) { - return false; - } if (!return_value) { zval_ptr_dtor(&_temp_return_value); } + + if (_result != SUCCESS) { + return false; + } + return true; } diff --git 
a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp index 0d5c8d8e1..9836a501e 100644 --- a/lib/php-extension/RequestProcessor.cpp +++ b/lib/php-extension/RequestProcessor.cpp @@ -1,12 +1,12 @@ #include "Includes.h" -std::string RequestProcessor::GetInitData(const std::string& token) { +std::string RequestProcessor::GetInitData(const std::string& userProvidedToken) { LoadLaravelEnvFile(); LoadEnvironment(); auto& globalToken = AIKIDO_GLOBAL(token); - if (!token.empty()) { - globalToken = token; + if (!userProvidedToken.empty()) { + globalToken = userProvidedToken; } unordered_map packages = GetPackages(); AIKIDO_GLOBAL(uses_symfony_http_foundation) = packages.find("symfony/http-foundation") != packages.end(); @@ -220,12 +220,6 @@ bool RequestProcessor::RequestInit() { if (sapiName == "apache2handler" || sapiName == "frankenphp") { // Apache-mod-php and FrankenPHP can serve multiple sites per process // We need to reload config each request to detect token changes - // Check disable BEFORE modifying any state (shared Go state or per-instance state) - // Use GetEnvBool() to read disable flag without modifying global state - if (GetEnvBool("AIKIDO_DISABLE", false)) { - AIKIDO_LOG_INFO("Request Processor initialization skipped because AIKIDO_DISABLE is set to 1!\n"); - return true; - } this->LoadConfigFromEnvironment(); } else { // Server APIs that are not apache-mod-php/frankenphp (like php-fpm, cli-server, ...) @@ -234,10 +228,6 @@ bool RequestProcessor::RequestInit() { // The user can update .env file via zero downtime deployments after the PHP server is started. if (AIKIDO_GLOBAL(token) == "") { AIKIDO_LOG_INFO("Loading Aikido config until we get a valid token for SAPI: %s...\n", AIKIDO_GLOBAL(sapi_name).c_str()); - if (GetEnvBool("AIKIDO_DISABLE", false)) { - AIKIDO_LOG_INFO("Request Processor initialization skipped because AIKIDO_DISABLE is set to 1!\n"); - return true; - } this->LoadConfigFromEnvironment(); } } diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h index 2fe0adfa6..aebd7d91e 100644 --- a/lib/php-extension/include/RequestProcessor.h +++ b/lib/php-extension/include/RequestProcessor.h @@ -32,7 +32,7 @@ class RequestProcessor { RequestProcessorUninitFn requestProcessorUninitFn = nullptr; private: - std::string GetInitData(const std::string& token = ""); + std::string GetInitData(const std::string& userProvidedToken = ""); bool ContextInit(); void SendPreRequestEvent(); void SendPostRequestEvent(); From c930c468a65b68ab6ebad2e5083379db0375d41f Mon Sep 17 00:00:00 2001 From: ioaniftimesei Date: Thu, 8 Jan 2026 12:17:42 +0000 Subject: [PATCH 164/170] Fix HandleBypassedIp header --- lib/php-extension/include/HandleBypassedIp.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/php-extension/include/HandleBypassedIp.h b/lib/php-extension/include/HandleBypassedIp.h index e0d6f1b69..b3c0e59ea 100644 --- a/lib/php-extension/include/HandleBypassedIp.h +++ b/lib/php-extension/include/HandleBypassedIp.h @@ -1,5 +1,5 @@ #pragma once -// Check if Aikido is disabled or the current IP is bypassed. -// The IP bypass check is performed lazily on first call. 

From c930c468a65b68ab6ebad2e5083379db0375d41f Mon Sep 17 00:00:00 2001
From: ioaniftimesei
Date: Thu, 8 Jan 2026 12:17:42 +0000
Subject: [PATCH 164/170] Fix HandleBypassedIp header

---
 lib/php-extension/include/HandleBypassedIp.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/lib/php-extension/include/HandleBypassedIp.h b/lib/php-extension/include/HandleBypassedIp.h
index e0d6f1b69..b3c0e59ea 100644
--- a/lib/php-extension/include/HandleBypassedIp.h
+++ b/lib/php-extension/include/HandleBypassedIp.h
@@ -1,5 +1,5 @@
 #pragma once
 
-// Check if Aikido is disabled or the current IP is bypassed.
-// The IP bypass check is performed lazily on first call.
+void InitIpBypassCheck();
+
 bool IsAikidoDisabledOrBypassed();

From 7d95987315d3e26357193fa35c40b60a57d3d8d9 Mon Sep 17 00:00:00 2001
From: ioaniftimesei
Date: Thu, 8 Jan 2026 13:54:22 +0000
Subject: [PATCH 165/170] Reset IP bypass state for new requests and skip config load during initial FrankenPHP warm-up request

---
 lib/php-extension/HandleBypassedIp.cpp |  3 +++
 lib/php-extension/RequestProcessor.cpp | 10 +++++++++-
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/lib/php-extension/HandleBypassedIp.cpp b/lib/php-extension/HandleBypassedIp.cpp
index cf0932634..da0611086 100644
--- a/lib/php-extension/HandleBypassedIp.cpp
+++ b/lib/php-extension/HandleBypassedIp.cpp
@@ -5,6 +5,9 @@
 // Accessed via AIKIDO_GLOBAL(isIpBypassed).
 
 void InitIpBypassCheck() {
+    // Reset state for new request (so it's not cached from previous request)
+    AIKIDO_GLOBAL(isIpBypassed) = false;
+
     ScopedTimer scopedTimer("check_ip_bypass", "aikido_op");
 
     try {
diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp
index 9836a501e..0b6224026 100644
--- a/lib/php-extension/RequestProcessor.cpp
+++ b/lib/php-extension/RequestProcessor.cpp
@@ -250,6 +250,7 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s
     if (this->requestProcessorConfigUpdateFn == nullptr || this->requestProcessorInstance == nullptr) {
         return;
     }
+
     if (currentToken.empty()) {
         AIKIDO_LOG_INFO("Current token is empty, skipping config reload...!\n");
         return;
     }
@@ -265,9 +266,16 @@
 }
 
 void RequestProcessor::LoadConfigFromEnvironment() {
+    // SKIP config load for the first frankenphp warmp-up request
+    if(sapi_module.name == "frankenphp") {
+        if(GetEnvBool("FRANKENPHP_WORKER", false) && !this->numberOfRequests) {
+            return;
+        }
+    }
+
     auto& globalToken = AIKIDO_GLOBAL(token);
     std::string previousToken = globalToken;
-
+
     LoadEnvironment();
 
     std::string currentToken = globalToken;
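
Note on the IP bypass changes above: FrankenPHP runs ZTS PHP on long-lived worker threads, so module globals survive from one request to the next on the same thread. That is why the lazily cached `checkedIpBypass` guard was removed and `InitIpBypassCheck()` now resets and recomputes `isIpBypassed` at request start. A standalone analogue of the two patterns (illustrative only; the names are made up and `thread_local` stands in for the extension's ZTS module globals):

    #include <iostream>

    thread_local bool is_ip_bypassed = false;     // request-scoped flag
    thread_local bool checked_ip_bypass = false;  // the removed lazy guard

    bool lazy_lookup(bool bypassed_for_client) {
        // Old pattern: computed once per *thread*, so a reused worker thread
        // keeps the previous request's answer.
        if (!checked_ip_bypass) {
            checked_ip_bypass = true;
            is_ip_bypassed = bypassed_for_client;
        }
        return is_ip_bypassed;
    }

    void request_init(bool bypassed_for_client) {
        // New pattern: reset and recompute at request init, every time.
        is_ip_bypassed = bypassed_for_client;
    }

    int main() {
        std::cout << lazy_lookup(true) << lazy_lookup(false) << "\n";  // "11": stale on the second call
        request_init(true);
        std::cout << is_ip_bypassed;
        request_init(false);
        std::cout << is_ip_bypassed << "\n";                           // "10": correct per request
    }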

From 87cd5367a77e27d992fecef51c840d8d18d38801 Mon Sep 17 00:00:00 2001
From: ioaniftimesei
Date: Thu, 8 Jan 2026 14:18:30 +0000
Subject: [PATCH 166/170] Add cleanup steps to Apache and Nginx pre-test functions

---
 tools/server_tests/apache/main.py | 8 ++++++++
 tools/server_tests/nginx/main.py  | 3 +++
 2 files changed, 11 insertions(+)

diff --git a/tools/server_tests/apache/main.py b/tools/server_tests/apache/main.py
index 250e33ab7..272dbc6ff 100755
--- a/tools/server_tests/apache/main.py
+++ b/tools/server_tests/apache/main.py
@@ -295,6 +295,14 @@ def apache_mod_php_process_test(test_data):
 
 
 def apache_mod_php_pre_tests():
+    # Stop any existing Apache processes first
+    subprocess.run(['pkill', apache_binary], stderr=subprocess.DEVNULL)
+    subprocess.run(['pkill', '-9', apache_binary], stderr=subprocess.DEVNULL)
+    time.sleep(1)
+
+    # Clean up log files
+    subprocess.run(['rm', '-rf', f'/var/log/aikido-*/*'], stderr=subprocess.DEVNULL)
+
     if not os.path.exists('/etc/httpd'):
         # Debian/Ubuntu Apache - use apache2ctl which sources /etc/apache2/envvars
         # This ensures APACHE_RUN_DIR and other variables are properly set
diff --git a/tools/server_tests/nginx/main.py b/tools/server_tests/nginx/main.py
index 50aaa2952..8dc4cbb36 100644
--- a/tools/server_tests/nginx/main.py
+++ b/tools/server_tests/nginx/main.py
@@ -188,6 +188,9 @@ def nginx_php_fpm_process_test(test_data):
 def nginx_php_fpm_pre_tests():
     subprocess.run(['pkill', 'nginx'])
     subprocess.run(['pkill', 'php-fpm'])
+    time.sleep(2)
+    subprocess.run(['pkill', '-9', 'php-fpm'], stderr=subprocess.DEVNULL)
+    time.sleep(2)
     subprocess.run(['rm', '-rf', f'{log_dir}/nginx/*'])
     subprocess.run(['rm', '-rf', f'{log_dir}/php-fpm/*'])
     subprocess.run(['rm', '-rf', f'{log_dir}/aikido-*/*'])

From 01deb212b40106f543abdffcbdf1a167279a2e87 Mon Sep 17 00:00:00 2001
From: ioaniftimesei
Date: Thu, 8 Jan 2026 14:57:50 +0000
Subject: [PATCH 167/170] Refactor FrankenPHP checks in Aikido extension to use global variable and improve logging during warm-up requests

---
 lib/php-extension/Aikido.cpp           | 2 +-
 lib/php-extension/RequestProcessor.cpp | 5 +++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp
index d0ceb06c2..cab174b39 100644
--- a/lib/php-extension/Aikido.cpp
+++ b/lib/php-extension/Aikido.cpp
@@ -6,7 +6,7 @@ ZEND_DECLARE_MODULE_GLOBALS(aikido)
 PHP_MINIT_FUNCTION(aikido) {
     // For FrankenPHP: Set sapi_name but skip rest of LoadEnvironment during MINIT
     // Full environment will be loaded in RINIT when Caddyfile env vars are available
-    if (sapi_module.name == std::string("frankenphp")) {
+    if (std::string(sapi_module.name) == "frankenphp") {
         AIKIDO_GLOBAL(sapi_name) = sapi_module.name;
     } else {
         // For other SAPIs: Load environment during MINIT as normal
diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp
index 0b6224026..ca1619bee 100644
--- a/lib/php-extension/RequestProcessor.cpp
+++ b/lib/php-extension/RequestProcessor.cpp
@@ -266,9 +266,10 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s
 }
 
 void RequestProcessor::LoadConfigFromEnvironment() {
-    // SKIP config load for the first frankenphp warmp-up request
-    if(sapi_module.name == "frankenphp") {
+    // SKIP config load for the first frankenphp warm-up request
+    if(AIKIDO_GLOBAL(sapi_name) == "frankenphp") {
         if(GetEnvBool("FRANKENPHP_WORKER", false) && !this->numberOfRequests) {
+            AIKIDO_LOG_INFO("FrankenPHP worker warm-up request detected, skipping config update\n");
             return;
         }
     }

From a5b65ad991137f490caeec969016a52db75dd075 Mon Sep 17 00:00:00 2001
From: ioaniftimesei
Date: Thu, 8 Jan 2026 16:07:15 +0000
Subject: [PATCH 168/170] Refactor LoadConfigFromEnvironment method in RequestProcessor to return a boolean value, enhancing error handling and logging during FrankenPHP warm-up requests.

---
 lib/php-extension/RequestProcessor.cpp       | 16 ++++++++++------
 lib/php-extension/include/RequestProcessor.h |  2 +-
 2 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp
index ca1619bee..fd2c3e052 100644
--- a/lib/php-extension/RequestProcessor.cpp
+++ b/lib/php-extension/RequestProcessor.cpp
@@ -216,11 +216,14 @@ bool RequestProcessor::RequestInit() {
 
     }
 
-    const auto& sapiName = AIKIDO_GLOBAL(sapi_name);
+    std::string sapiName = sapi_module.name;
     if (sapiName == "apache2handler" || sapiName == "frankenphp") {
         // Apache-mod-php and FrankenPHP can serve multiple sites per process
         // We need to reload config each request to detect token changes
-        this->LoadConfigFromEnvironment();
+        if(!this->LoadConfigFromEnvironment()) {
+            this->numberOfRequests++;
+            return true;
+        }
     } else {
         // Server APIs that are not apache-mod-php/frankenphp (like php-fpm, cli-server, ...)
         // can only serve one site per process, so the config should be loaded at the first request.
@@ -268,15 +265,7 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s
     this->requestProcessorConfigUpdateFn(this->requestProcessorInstance, GoCreateString(initJson));
 }
 
-void RequestProcessor::LoadConfigFromEnvironment() {
+bool RequestProcessor::LoadConfigFromEnvironment() {
     // SKIP config load for the first frankenphp warm-up request
-    if(AIKIDO_GLOBAL(sapi_name) == "frankenphp") {
+    if(std::string(sapi_module.name) == "frankenphp") {
         if(GetEnvBool("FRANKENPHP_WORKER", false) && !this->numberOfRequests) {
-            AIKIDO_LOG_INFO("FrankenPHP worker warm-up request detected, skipping config update\n");
-            return;
+            AIKIDO_LOG_INFO("FrankenPHP worker warm-up request detected, skipping RequestInit\n");
+            return false;
         }
     }
@@ -282,6 +285,7 @@ void RequestProcessor::LoadConfigFromEnvironment() {
     std::string currentToken = globalToken;
 
     LoadConfig(previousToken, currentToken);
+    return true;
 }
diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h
index aebd7d91e..faa333e33 100644
--- a/lib/php-extension/include/RequestProcessor.h
+++ b/lib/php-extension/include/RequestProcessor.h
@@ -46,7 +46,7 @@ class RequestProcessor {
     bool IsBlockingEnabled();
     bool ReportStats();
     void LoadConfig(const std::string& previousToken, const std::string& currentToken);
-    void LoadConfigFromEnvironment();
+    bool LoadConfigFromEnvironment();
     void LoadConfigWithTokenFromPHPSetToken(const std::string& tokenFromMiddleware);
     void RequestShutdown();
     void Uninit();

From 01be819b06938b3ce5054a504937f8d5b5efb115 Mon Sep 17 00:00:00 2001
From: ioaniftimesei
Date: Thu, 8 Jan 2026 16:26:03 +0000
Subject: [PATCH 169/170] Update FrankenPHP checks in Environment and RequestProcessor to use sapi_module.name for consistency and clarity in warm-up request handling.

---
 lib/php-extension/Environment.cpp      | 2 +-
 lib/php-extension/RequestProcessor.cpp | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/lib/php-extension/Environment.cpp b/lib/php-extension/Environment.cpp
index 5ad3911fa..cbf53a240 100644
--- a/lib/php-extension/Environment.cpp
+++ b/lib/php-extension/Environment.cpp
@@ -102,7 +102,7 @@ bool LoadLaravelEnvFile() {
 This function reads environment variables from $_SERVER for FrankenPHP compatibility.
 */
 std::string GetFrankenEnvVariable(const std::string& env_key) {
-    if (AIKIDO_GLOBAL(sapi_name) != "frankenphp") {
+    if (std::string(sapi_module.name) != "frankenphp") {
         return "";
     }
 
diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp
index fd2c3e052..da3a70768 100644
--- a/lib/php-extension/RequestProcessor.cpp
+++ b/lib/php-extension/RequestProcessor.cpp
@@ -269,9 +269,9 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s
 }
 
 bool RequestProcessor::LoadConfigFromEnvironment() {
-    // SKIP config load for the first frankenphp warm-up request
+    // SKIP config load for frankenphp warm-up request
     if(std::string(sapi_module.name) == "frankenphp") {
-        if(GetEnvBool("FRANKENPHP_WORKER", false) && !this->numberOfRequests) {
+        if(GetEnvBool("FRANKENPHP_WORKER", false)) {
             AIKIDO_LOG_INFO("FrankenPHP worker warm-up request detected, skipping RequestInit\n");
             return false;
         }
     }

From bbcf868848ecf1dbc8c2a528adf399a642caa360 Mon Sep 17 00:00:00 2001
From: ioaniftimesei
Date: Thu, 8 Jan 2026 16:48:31 +0000
Subject: [PATCH 170/170] Refactor RequestProcessor and Aikido extension to streamline FrankenPHP warm-up request handling

---
 lib/php-extension/Aikido.cpp                 |  7 +++++++
 lib/php-extension/RequestProcessor.cpp       | 16 ++--------------
 lib/php-extension/include/RequestProcessor.h |  2 +-
 3 files changed, 10 insertions(+), 15 deletions(-)

diff --git a/lib/php-extension/Aikido.cpp b/lib/php-extension/Aikido.cpp
index cab174b39..ebc426195 100644
--- a/lib/php-extension/Aikido.cpp
+++ b/lib/php-extension/Aikido.cpp
@@ -67,6 +67,13 @@ PHP_MSHUTDOWN_FUNCTION(aikido) {
 PHP_RINIT_FUNCTION(aikido) {
     ScopedTimer scopedTimer("request_init", "request_op");
 
+    if (std::string(sapi_module.name) == "frankenphp") {
+        if (GetEnvBool("FRANKENPHP_WORKER", false)) {
+            AIKIDO_LOG_INFO("RINIT: Skipping FrankenPHP warm-up request\n");
+            return SUCCESS;
+        }
+    }
+
     AIKIDO_GLOBAL(phpLifecycle).RequestInit();
 
     AIKIDO_LOG_DEBUG("RINIT finished!\n");
diff --git a/lib/php-extension/RequestProcessor.cpp b/lib/php-extension/RequestProcessor.cpp
index da3a70768..c23918ec0 100644
--- a/lib/php-extension/RequestProcessor.cpp
+++ b/lib/php-extension/RequestProcessor.cpp
@@ -220,10 +220,7 @@ bool RequestProcessor::RequestInit() {
     if (sapiName == "apache2handler" || sapiName == "frankenphp") {
         // Apache-mod-php and FrankenPHP can serve multiple sites per process
         // We need to reload config each request to detect token changes
-        if(!this->LoadConfigFromEnvironment()) {
-            this->numberOfRequests++;
-            return true;
-        }
+        this->LoadConfigFromEnvironment();
     } else {
         // Server APIs that are not apache-mod-php/frankenphp (like php-fpm, cli-server, ...)
         // can only serve one site per process, so the config should be loaded at the first request.
@@ -268,15 +265,7 @@ void RequestProcessor::LoadConfig(const std::string& previousToken, const std::s
     this->requestProcessorConfigUpdateFn(this->requestProcessorInstance, GoCreateString(initJson));
 }
 
-bool RequestProcessor::LoadConfigFromEnvironment() {
-    // SKIP config load for frankenphp warm-up request
-    if(std::string(sapi_module.name) == "frankenphp") {
-        if(GetEnvBool("FRANKENPHP_WORKER", false)) {
-            AIKIDO_LOG_INFO("FrankenPHP worker warm-up request detected, skipping RequestInit\n");
-            return false;
-        }
-    }
-
+void RequestProcessor::LoadConfigFromEnvironment() {
     auto& globalToken = AIKIDO_GLOBAL(token);
     std::string previousToken = globalToken;
 
@@ -285,7 +274,6 @@ bool RequestProcessor::LoadConfigFromEnvironment() {
     std::string currentToken = globalToken;
 
     LoadConfig(previousToken, currentToken);
-    return true;
 }
diff --git a/lib/php-extension/include/RequestProcessor.h b/lib/php-extension/include/RequestProcessor.h
index faa333e33..aebd7d91e 100644
--- a/lib/php-extension/include/RequestProcessor.h
+++ b/lib/php-extension/include/RequestProcessor.h
@@ -46,7 +46,7 @@ class RequestProcessor {
     bool IsBlockingEnabled();
     bool ReportStats();
     void LoadConfig(const std::string& previousToken, const std::string& currentToken);
-    bool LoadConfigFromEnvironment();
+    void LoadConfigFromEnvironment();
     void LoadConfigWithTokenFromPHPSetToken(const std::string& tokenFromMiddleware);
     void RequestShutdown();
     void Uninit();
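
Note on the end state of patches 165-170: the warm-up handling has moved out of `RequestProcessor::LoadConfigFromEnvironment()` and into `PHP_RINIT_FUNCTION(aikido)`, which returns `SUCCESS` immediately when the SAPI is `frankenphp` and `FRANKENPHP_WORKER` is set, so the whole request lifecycle is skipped for the worker warm-up request. A standalone analogue of that decision (illustrative only; `env_bool` is a hypothetical stand-in for the extension's `GetEnvBool`, whose accepted truthy values may differ):

    #include <cstdlib>
    #include <cstring>
    #include <iostream>
    #include <string>

    static bool env_bool(const char* key, bool fallback) {
        // Assumed behaviour: missing -> fallback; "1"/"true" -> true; anything else -> false.
        const char* v = std::getenv(key);
        if (v == nullptr) return fallback;
        return std::strcmp(v, "1") == 0 || std::strcmp(v, "true") == 0;
    }

    static bool should_skip_request_init(const std::string& sapi_name) {
        // Mirrors the PATCH 170 RINIT guard: FrankenPHP SAPI + FRANKENPHP_WORKER set.
        return sapi_name == "frankenphp" && env_bool("FRANKENPHP_WORKER", false);
    }

    int main() {
        // Prints 1 when FRANKENPHP_WORKER=1 is present in the environment, otherwise 0.
        std::cout << should_skip_request_init("frankenphp") << "\n";
    }

Keeping the check in RINIT rather than in the config loader means no per-request state (such as `numberOfRequests`) has to be consulted or updated for the skipped request.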