diff --git a/.github/workflows/osrm-backend.yml b/.github/workflows/osrm-backend.yml index fe4ee32482..a8593f8864 100644 --- a/.github/workflows/osrm-backend.yml +++ b/.github/workflows/osrm-backend.yml @@ -23,636 +23,636 @@ concurrency: cancel-in-progress: true jobs: - windows-release-node: - needs: format-taginfo-docs - runs-on: windows-2022 - continue-on-error: false - env: - BUILD_TYPE: Release - steps: - - uses: actions/checkout@v4 - - run: pip install "conan<2.0.0" - - run: conan --version - - run: cmake --version - - uses: actions/setup-node@v4 - with: - node-version: 18 - - run: node --version - - run: npm --version - - name: Prepare environment - shell: bash - run: | - PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") - echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV - - run: npm install --ignore-scripts - - run: npm link --ignore-scripts - - name: Build - shell: bash - run: | - mkdir build - cd build - cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. - cmake --build . --config Release + # windows-release-node: + # needs: format-taginfo-docs + # runs-on: windows-2022 + # continue-on-error: false + # env: + # BUILD_TYPE: Release + # steps: + # - uses: actions/checkout@v4 + # - run: pip install "conan<2.0.0" + # - run: conan --version + # - run: cmake --version + # - uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - run: node --version + # - run: npm --version + # - name: Prepare environment + # shell: bash + # run: | + # PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") + # echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV + # - run: npm install --ignore-scripts + # - run: npm link --ignore-scripts + # - name: Build + # shell: bash + # run: | + # mkdir build + # cd build + # cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON .. + # cmake --build . --config Release - # TODO: MSVC goes out of memory when building our tests - # - name: Run tests - # shell: bash - # run: | - # cd build - # cmake --build . 
--config Release --target tests - # # TODO: run tests - # - name: Run node tests - # shell: bash - # run: | - # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf - - # mkdir -p test/data/ch - # cp test/data/monaco.osrm* test/data/ch/ - # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm - - # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm - # node test/nodejs/index.js - - name: Build Node package - shell: bash - run: ./scripts/ci/node_package.sh - - name: Publish Node package - if: ${{ env.PUBLISH == 'On' }} - uses: ncipollo/release-action@v1 - with: - allowUpdates: true - artifactErrorsFailBuild: true - artifacts: build/stage/**/*.tar.gz - omitBody: true - omitBodyDuringUpdate: true - omitName: true - omitNameDuringUpdate: true - replacesArtifacts: true - token: ${{ secrets.GITHUB_TOKEN }} - - format-taginfo-docs: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v4 - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: 18 - - name: Enable Node.js cache - uses: actions/cache@v4 - with: - path: ~/.npm - key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node- - - name: Prepare environment - run: | - npm ci --ignore-scripts - clang-format-15 --version - - name: Run checks - run: | - ./scripts/check_taginfo.py taginfo.json profiles/car.lua - ./scripts/format.sh && ./scripts/error_on_dirty.sh - node ./scripts/validate_changelog.js - npm run docs && ./scripts/error_on_dirty.sh - npm audit --production - - docker-image-matrix: - strategy: - matrix: - docker-base-image: ["debian", "alpine"] - needs: format-taginfo-docs - runs-on: ubuntu-22.04 - continue-on-error: false - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Enable osm.pbf cache - uses: actions/cache@v4 - with: - path: berlin-latest.osm.pbf - key: v1-berlin-osm-pbf - restore-keys: | - v1-berlin-osm-pbf - - name: Docker build - run: | - docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . - - name: Test Docker image - run: | - if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then - wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf - fi - TAG=osrm-backend-local - # when `--memory-swap` value equals `--memory` it means container won't use swap - # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details - MEMORY_ARGS="--memory=1g --memory-swap=1g" - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson - if [ ! 
-s "${PWD}/berlin-latest.geojson" ] - then - >&2 echo "No berlin-latest.geojson found" - exit 1 - fi - # removing `.osrm.nbg` to check that whole pipeline works without it - rm -rf "${PWD}/berlin-latest.osrm.nbg" - - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm - docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm - docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & - curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" - docker stop osrm-container - - build-test-publish: - needs: format-taginfo-docs - strategy: - matrix: - include: - - name: gcc-13-debug-cov - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: gcc-13 - CUCUMBER_TIMEOUT: 20000 - CXXCOMPILER: g++-13 - ENABLE_COVERAGE: ON - - - name: clang-18-debug-asan-ubsan - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CUCUMBER_TIMEOUT: 20000 - CXXCOMPILER: clang++-18 - ENABLE_SANITIZER: ON - TARGET_ARCH: x86_64-asan-ubsan - OSRM_CONNECTION_RETRIES: 10 - OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 - - - name: clang-18-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF - - - name: clang-18-debug - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF - - - name: clang-18-debug-clang-tidy - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Debug - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - CUCUMBER_TIMEOUT: 60000 - ENABLE_CLANG_TIDY: ON - - - - name: clang-17-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-17 - CXXCOMPILER: clang++-17 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF - - - name: clang-16-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - CUCUMBER_TIMEOUT: 60000 - ENABLE_LTO: OFF - - - name: conan-linux-debug-asan-ubsan - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - ENABLE_CONAN: ON - ENABLE_SANITIZER: ON - ENABLE_LTO: OFF - - - name: conan-linux-release - continue-on-error: false - node: 18 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: clang-18 - CXXCOMPILER: clang++-18 - ENABLE_CONAN: ON - ENABLE_LTO: OFF - - - name: gcc-14-release - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-14 - CXXCOMPILER: g++-14 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - - name: gcc-13-release - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: gcc-13 - CXXCOMPILER: g++-13 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - - name: gcc-12-release - continue-on-error: false - node: 20 - runs-on: ubuntu-22.04 - BUILD_TOOLS: ON - BUILD_TYPE: Release - CCOMPILER: 
gcc-12 - CXXCOMPILER: g++-12 - CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' - - - name: conan-linux-release-node - build_node_package: true - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TYPE: Release - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - ENABLE_CONAN: ON - NODE_PACKAGE_TESTS_ONLY: ON - - - name: conan-linux-debug-node - build_node_package: true - continue-on-error: false - node: 20 - runs-on: ubuntu-24.04 - BUILD_TYPE: Debug - CCOMPILER: clang-16 - CXXCOMPILER: clang++-16 - ENABLE_CONAN: ON - NODE_PACKAGE_TESTS_ONLY: ON - - - name: conan-macos-x64-release-node - build_node_package: true - continue-on-error: true - node: 20 - runs-on: macos-13 # x86_64 - BUILD_TYPE: Release - CCOMPILER: clang - CXXCOMPILER: clang++ - CUCUMBER_TIMEOUT: 60000 - ENABLE_ASSERTIONS: ON - ENABLE_CONAN: ON - - - name: conan-macos-arm64-release-node - build_node_package: true - continue-on-error: true - node: 20 - runs-on: macos-14 # arm64 - BUILD_TYPE: Release - CCOMPILER: clang - CXXCOMPILER: clang++ - CUCUMBER_TIMEOUT: 60000 - ENABLE_ASSERTIONS: ON - ENABLE_CONAN: ON - - name: ${{ matrix.name}} - continue-on-error: ${{ matrix.continue-on-error }} - runs-on: ${{ matrix.runs-on }} - env: - BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }} - BUILD_TYPE: ${{ matrix.BUILD_TYPE }} - BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }} - CCOMPILER: ${{ matrix.CCOMPILER }} - CFLAGS: ${{ matrix.CFLAGS }} - CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }} - CXXCOMPILER: ${{ matrix.CXXCOMPILER }} - CXXFLAGS: ${{ matrix.CXXFLAGS }} - ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }} - ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }} - ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }} - ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }} - ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }} - NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }} - TARGET_ARCH: ${{ matrix.TARGET_ARCH }} - OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }} - OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }} - ENABLE_LTO: ${{ matrix.ENABLE_LTO }} - steps: - - uses: actions/checkout@v4 - - name: Build machine architecture - run: uname -m - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node }} - - name: Enable Node.js cache - uses: actions/cache@v4 - with: - path: ~/.npm - key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node- - - name: Enable compiler cache - uses: actions/cache@v4 - with: - path: ~/.ccache - key: ccache-${{ matrix.name }}-${{ github.sha }} - restore-keys: | - ccache-${{ matrix.name }}- - - name: Enable Conan cache - uses: actions/cache@v4 - with: - path: ~/.conan - key: v9-conan-${{ matrix.name }}-${{ github.sha }} - restore-keys: | - v9-conan-${{ matrix.name }}- - - name: Enable test cache - uses: actions/cache@v4 - with: - path: ${{github.workspace}}/test/cache - key: v4-test-${{ matrix.name }}-${{ github.sha }} - restore-keys: | - v4-test-${{ matrix.name }}- - - name: Prepare environment - run: | - echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV - mkdir -p $HOME/.ccache - - PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") - echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV - echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV - echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV - if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then - # We 
can only set this after checkout once we know the workspace directory - echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV - echo "UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV - echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV - fi - - if [[ "${RUNNER_OS}" == "Linux" ]]; then - echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV - elif [[ "${RUNNER_OS}" == "macOS" ]]; then - echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV - fi - # See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041 - # We need it to be able to access system folders while restoring cached Boost below - - name: Give tar root ownership - if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar - - name: Cache Boost - if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - id: cache-boost - uses: actions/cache@v4 - with: - path: | - /usr/local/include/boost - /usr/local/lib/libboost* - key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} - restore-keys: | - v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} - - - name: Install Boost - if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' - run: | - BOOST_VERSION="1.85.0" - BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}" - wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz - tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz - cd boost_${BOOST_VERSION_UNDERSCORE} - sudo ./bootstrap.sh - sudo ./b2 install - cd .. - sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}* + # # TODO: MSVC goes out of memory when building our tests + # # - name: Run tests + # # shell: bash + # # run: | + # # cd build + # # cmake --build . 
--config Release --target tests + # # # TODO: run tests + # # - name: Run node tests + # # shell: bash + # # run: | + # # ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf + + # # mkdir -p test/data/ch + # # cp test/data/monaco.osrm* test/data/ch/ + # # ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm + + # # ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm + # # node test/nodejs/index.js + # - name: Build Node package + # shell: bash + # run: ./scripts/ci/node_package.sh + # - name: Publish Node package + # if: ${{ env.PUBLISH == 'On' }} + # uses: ncipollo/release-action@v1 + # with: + # allowUpdates: true + # artifactErrorsFailBuild: true + # artifacts: build/stage/**/*.tar.gz + # omitBody: true + # omitBodyDuringUpdate: true + # omitName: true + # omitNameDuringUpdate: true + # replacesArtifacts: true + # token: ${{ secrets.GITHUB_TOKEN }} + + # format-taginfo-docs: + # runs-on: ubuntu-22.04 + # steps: + # - uses: actions/checkout@v4 + # - name: Use Node.js + # uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - name: Enable Node.js cache + # uses: actions/cache@v4 + # with: + # path: ~/.npm + # key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + # restore-keys: | + # ${{ runner.os }}-node- + # - name: Prepare environment + # run: | + # npm ci --ignore-scripts + # clang-format-15 --version + # - name: Run checks + # run: | + # ./scripts/check_taginfo.py taginfo.json profiles/car.lua + # ./scripts/format.sh && ./scripts/error_on_dirty.sh + # node ./scripts/validate_changelog.js + # npm run docs && ./scripts/error_on_dirty.sh + # npm audit --production + + # docker-image-matrix: + # strategy: + # matrix: + # docker-base-image: ["debian", "alpine"] + # needs: format-taginfo-docs + # runs-on: ubuntu-22.04 + # continue-on-error: false + # steps: + # - name: Check out the repo + # uses: actions/checkout@v4 + # - name: Enable osm.pbf cache + # uses: actions/cache@v4 + # with: + # path: berlin-latest.osm.pbf + # key: v1-berlin-osm-pbf + # restore-keys: | + # v1-berlin-osm-pbf + # - name: Docker build + # run: | + # docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} . + # - name: Test Docker image + # run: | + # if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then + # wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf + # fi + # TAG=osrm-backend-local + # # when `--memory-swap` value equals `--memory` it means container won't use swap + # # see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details + # MEMORY_ARGS="--memory=1g --memory-swap=1g" + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson + # if [ ! 
-s "${PWD}/berlin-latest.geojson" ] + # then + # >&2 echo "No berlin-latest.geojson found" + # exit 1 + # fi + # # removing `.osrm.nbg` to check that whole pipeline works without it + # rm -rf "${PWD}/berlin-latest.osrm.nbg" + + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm + # docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm + # docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm & + # curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" + # docker stop osrm-container + + # build-test-publish: + # needs: format-taginfo-docs + # strategy: + # matrix: + # include: + # - name: gcc-13-debug-cov + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: gcc-13 + # CUCUMBER_TIMEOUT: 20000 + # CXXCOMPILER: g++-13 + # ENABLE_COVERAGE: ON + + # - name: clang-18-debug-asan-ubsan + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CUCUMBER_TIMEOUT: 20000 + # CXXCOMPILER: clang++-18 + # ENABLE_SANITIZER: ON + # TARGET_ARCH: x86_64-asan-ubsan + # OSRM_CONNECTION_RETRIES: 10 + # OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5 + + # - name: clang-18-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF + + # - name: clang-18-debug + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF + + # - name: clang-18-debug-clang-tidy + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Debug + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_CLANG_TIDY: ON + + + # - name: clang-17-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-17 + # CXXCOMPILER: clang++-17 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF + + # - name: clang-16-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_LTO: OFF + + # - name: conan-linux-debug-asan-ubsan + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # ENABLE_CONAN: ON + # ENABLE_SANITIZER: ON + # ENABLE_LTO: OFF + + # - name: conan-linux-release + # continue-on-error: false + # node: 18 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: clang-18 + # CXXCOMPILER: clang++-18 + # ENABLE_CONAN: ON + # ENABLE_LTO: OFF + + # - name: gcc-14-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-14 + # CXXCOMPILER: g++-14 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + + # - name: gcc-13-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TOOLS: ON + # 
BUILD_TYPE: Release + # CCOMPILER: gcc-13 + # CXXCOMPILER: g++-13 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + + # - name: gcc-12-release + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-22.04 + # BUILD_TOOLS: ON + # BUILD_TYPE: Release + # CCOMPILER: gcc-12 + # CXXCOMPILER: g++-12 + # CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized' + + # - name: conan-linux-release-node + # build_node_package: true + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TYPE: Release + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # ENABLE_CONAN: ON + # NODE_PACKAGE_TESTS_ONLY: ON + + # - name: conan-linux-debug-node + # build_node_package: true + # continue-on-error: false + # node: 20 + # runs-on: ubuntu-24.04 + # BUILD_TYPE: Debug + # CCOMPILER: clang-16 + # CXXCOMPILER: clang++-16 + # ENABLE_CONAN: ON + # NODE_PACKAGE_TESTS_ONLY: ON + + # - name: conan-macos-x64-release-node + # build_node_package: true + # continue-on-error: true + # node: 20 + # runs-on: macos-13 # x86_64 + # BUILD_TYPE: Release + # CCOMPILER: clang + # CXXCOMPILER: clang++ + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_ASSERTIONS: ON + # ENABLE_CONAN: ON + + # - name: conan-macos-arm64-release-node + # build_node_package: true + # continue-on-error: true + # node: 20 + # runs-on: macos-14 # arm64 + # BUILD_TYPE: Release + # CCOMPILER: clang + # CXXCOMPILER: clang++ + # CUCUMBER_TIMEOUT: 60000 + # ENABLE_ASSERTIONS: ON + # ENABLE_CONAN: ON + + # name: ${{ matrix.name}} + # continue-on-error: ${{ matrix.continue-on-error }} + # runs-on: ${{ matrix.runs-on }} + # env: + # BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }} + # BUILD_TYPE: ${{ matrix.BUILD_TYPE }} + # BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }} + # CCOMPILER: ${{ matrix.CCOMPILER }} + # CFLAGS: ${{ matrix.CFLAGS }} + # CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }} + # CXXCOMPILER: ${{ matrix.CXXCOMPILER }} + # CXXFLAGS: ${{ matrix.CXXFLAGS }} + # ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }} + # ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }} + # ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }} + # ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }} + # ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }} + # NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }} + # TARGET_ARCH: ${{ matrix.TARGET_ARCH }} + # OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }} + # OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }} + # ENABLE_LTO: ${{ matrix.ENABLE_LTO }} + # steps: + # - uses: actions/checkout@v4 + # - name: Build machine architecture + # run: uname -m + # - name: Use Node.js + # uses: actions/setup-node@v4 + # with: + # node-version: ${{ matrix.node }} + # - name: Enable Node.js cache + # uses: actions/cache@v4 + # with: + # path: ~/.npm + # key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + # restore-keys: | + # ${{ runner.os }}-node- + # - name: Enable compiler cache + # uses: actions/cache@v4 + # with: + # path: ~/.ccache + # key: ccache-${{ matrix.name }}-${{ github.sha }} + # restore-keys: | + # ccache-${{ matrix.name }}- + # - name: Enable Conan cache + # uses: actions/cache@v4 + # with: + # path: ~/.conan + # key: v9-conan-${{ matrix.name }}-${{ github.sha }} + # restore-keys: | + # v9-conan-${{ matrix.name }}- + # - name: Enable test cache + # uses: actions/cache@v4 + # with: + # path: ${{github.workspace}}/test/cache + # key: v4-test-${{ matrix.name }}-${{ github.sha }} + # restore-keys: | + # v4-test-${{ matrix.name }}- + # - name: Prepare environment + # run: 
| + # echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV + # mkdir -p $HOME/.ccache + + # PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)") + # echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV + # echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV + # echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV + # if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then + # # We can only set this after checkout once we know the workspace directory + # echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV + # echo "UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV + # echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV + # fi + + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV + # elif [[ "${RUNNER_OS}" == "macOS" ]]; then + # echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV + # fi + # # See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041 + # # We need it to be able to access system folders while restoring cached Boost below + # - name: Give tar root ownership + # if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + # run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar + # - name: Cache Boost + # if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + # id: cache-boost + # uses: actions/cache@v4 + # with: + # path: | + # /usr/local/include/boost + # /usr/local/lib/libboost* + # key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} + # restore-keys: | + # v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }} + + # - name: Install Boost + # if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON' + # run: | + # BOOST_VERSION="1.85.0" + # BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}" + # wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz + # tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz + # cd boost_${BOOST_VERSION_UNDERSCORE} + # sudo ./bootstrap.sh + # sudo ./b2 install + # cd .. 
+ # sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}* - - name: Install dev dependencies - run: | - python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages - - # workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499 - # and that's why CI cannot find conan executable installed above - if [[ "${RUNNER_OS}" == "macOS" ]]; then - echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH - fi - - # ccache - if [[ "${RUNNER_OS}" == "Linux" ]]; then - sudo apt-get update -y && sudo apt-get install ccache - elif [[ "${RUNNER_OS}" == "macOS" ]]; then - brew install ccache - fi - - # Linux dev packages - if [ "${ENABLE_CONAN}" != "ON" ]; then - sudo apt-get update -y - sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev - if [[ "${CCOMPILER}" != clang-* ]]; then - sudo apt-get install -y ${CXXCOMPILER} - fi - if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then - sudo apt-get install -y lcov - fi - fi - - # TBB - TBB_VERSION=2021.12.0 - if [[ "${RUNNER_OS}" == "Linux" ]]; then - TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz" - elif [[ "${RUNNER_OS}" == "macOS" ]]; then - TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz" - fi - wget --tries 5 ${TBB_URL} -O onetbb.tgz - tar zxvf onetbb.tgz - sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/ - sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/ - - name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18 - if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }} - run: | - sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq - - conan config init - yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml" - - name: Prepare build - run: | - mkdir ${OSRM_BUILD_DIR} - ccache --max-size=256M - npm ci --ignore-scripts - if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then - lcov --directory . --zerocounters # clean cached files - fi - echo "CC=${CCOMPILER}" >> $GITHUB_ENV - echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV - if [[ "${RUNNER_OS}" == "macOS" ]]; then - # missing from GCC path, needed for conan builds of libiconv, for example. 
- sudo xcode-select --switch /Library/Developer/CommandLineTools - echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV - echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV - fi - - - name: Build and install OSRM - run: | - echo "Using ${JOBS} jobs" - pushd ${OSRM_BUILD_DIR} + # - name: Install dev dependencies + # run: | + # python3 -m pip install "conan<2.0.0" || python3 -m pip install "conan<2.0.0" --break-system-packages + + # # workaround for issue that GitHub Actions seems to not adding it to PATH after https://github.com/actions/runner-images/pull/6499 + # # and that's why CI cannot find conan executable installed above + # if [[ "${RUNNER_OS}" == "macOS" ]]; then + # echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH + # fi + + # # ccache + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # sudo apt-get update -y && sudo apt-get install ccache + # elif [[ "${RUNNER_OS}" == "macOS" ]]; then + # brew install ccache + # fi + + # # Linux dev packages + # if [ "${ENABLE_CONAN}" != "ON" ]; then + # sudo apt-get update -y + # sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev + # if [[ "${CCOMPILER}" != clang-* ]]; then + # sudo apt-get install -y ${CXXCOMPILER} + # fi + # if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then + # sudo apt-get install -y lcov + # fi + # fi + + # # TBB + # TBB_VERSION=2021.12.0 + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz" + # elif [[ "${RUNNER_OS}" == "macOS" ]]; then + # TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz" + # fi + # wget --tries 5 ${TBB_URL} -O onetbb.tgz + # tar zxvf onetbb.tgz + # sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/ + # sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/ + # - name: Add Clang 18 to list of Conan compilers # workaround for the issue that Conan 1.x doesn't know about Clang 18 + # if: ${{ matrix.ENABLE_CONAN == 'ON' && matrix.CCOMPILER == 'clang-18' }} + # run: | + # sudo wget https://github.com/mikefarah/yq/releases/download/v4.9.6/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq + + # conan config init + # yq eval '.compiler.clang.version += ["18"]' -i "$HOME/.conan/settings.yml" + # - name: Prepare build + # run: | + # mkdir ${OSRM_BUILD_DIR} + # ccache --max-size=256M + # npm ci --ignore-scripts + # if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then + # lcov --directory . --zerocounters # clean cached files + # fi + # echo "CC=${CCOMPILER}" >> $GITHUB_ENV + # echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV + # if [[ "${RUNNER_OS}" == "macOS" ]]; then + # # missing from GCC path, needed for conan builds of libiconv, for example. + # sudo xcode-select --switch /Library/Developer/CommandLineTools + # echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV + # echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV + # fi + + # - name: Build and install OSRM + # run: | + # echo "Using ${JOBS} jobs" + # pushd ${OSRM_BUILD_DIR} - ccache --zero-stats - cmake .. 
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ - -DENABLE_CONAN=${ENABLE_CONAN:-OFF} \ - -DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \ - -DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \ - -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \ - -DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \ - -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \ - -DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \ - -DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \ - -DENABLE_CCACHE=ON \ - -DENABLE_LTO=${ENABLE_LTO:-ON} \ - -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} - make --jobs=${JOBS} - - if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then - make tests --jobs=${JOBS} - make benchmarks --jobs=${JOBS} - - sudo make install - if [[ "${RUNNER_OS}" == "Linux" ]]; then - echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV - fi - echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV - fi - popd - - name: Build example - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} - run: | - mkdir example/build && pushd example/build - cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} - make --jobs=${JOBS} - popd - - name: Run all tests - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} - run: | - make -C test/data benchmark - - # macOS SIP strips the linker path. Reset this inside the running shell - export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }} - ./example/build/osrm-example test/data/mld/monaco.osrm - - # All tests assume to be run from the build directory - pushd ${OSRM_BUILD_DIR} - for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done - if [ -z "${ENABLE_SANITIZER}" ]; then - npm run nodejs-tests - fi - popd - npm test - - - name: Use Node 18 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - uses: actions/setup-node@v4 - with: - node-version: 18 - - name: Run Node package tests on Node 18 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - run: | - node --version - npm run nodejs-tests - - name: Use Node 20 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - uses: actions/setup-node@v4 - with: - node-version: 20 - - name: Run Node package tests on Node 20 - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - run: | - node --version - npm run nodejs-tests - - name: Use Node latest - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - uses: actions/setup-node@v4 - with: - node-version: latest - - name: Run Node package tests on Node-latest - if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} - run: | - node --version - npm run nodejs-tests - - - name: Upload test logs - uses: actions/upload-artifact@v4 - if: failure() - with: - name: logs - path: test/logs/ - - # - name: Generate code coverage - # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} - # run: | - # lcov --directory . --capture --output-file coverage.info # capture coverage info - # lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system - # lcov --list coverage.info #debug info + # ccache --zero-stats + # cmake .. 
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ + # -DENABLE_CONAN=${ENABLE_CONAN:-OFF} \ + # -DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \ + # -DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \ + # -DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \ + # -DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \ + # -DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \ + # -DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \ + # -DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \ + # -DENABLE_CCACHE=ON \ + # -DENABLE_LTO=${ENABLE_LTO:-ON} \ + # -DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR} + # make --jobs=${JOBS} + + # if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then + # make tests --jobs=${JOBS} + # make benchmarks --jobs=${JOBS} + + # sudo make install + # if [[ "${RUNNER_OS}" == "Linux" ]]; then + # echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV + # fi + # echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV + # fi + # popd + # - name: Build example + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} + # run: | + # mkdir example/build && pushd example/build + # cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} + # make --jobs=${JOBS} + # popd + # - name: Run all tests + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }} + # run: | + # make -C test/data benchmark + + # # macOS SIP strips the linker path. Reset this inside the running shell + # export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }} + # ./example/build/osrm-example test/data/mld/monaco.osrm + + # # All tests assume to be run from the build directory + # pushd ${OSRM_BUILD_DIR} + # for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done + # if [ -z "${ENABLE_SANITIZER}" ]; then + # npm run nodejs-tests + # fi + # popd + # npm test + + # - name: Use Node 18 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # uses: actions/setup-node@v4 + # with: + # node-version: 18 + # - name: Run Node package tests on Node 18 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # run: | + # node --version + # npm run nodejs-tests + # - name: Use Node 20 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # uses: actions/setup-node@v4 + # with: + # node-version: 20 + # - name: Run Node package tests on Node 20 + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # run: | + # node --version + # npm run nodejs-tests + # - name: Use Node latest + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # uses: actions/setup-node@v4 + # with: + # node-version: latest + # - name: Run Node package tests on Node-latest + # if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }} + # run: | + # node --version + # npm run nodejs-tests + + # - name: Upload test logs + # uses: actions/upload-artifact@v4 + # if: failure() + # with: + # name: logs + # path: test/logs/ + + # # - name: Generate code coverage + # # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} + # # run: | + # # lcov --directory . 
--capture --output-file coverage.info # capture coverage info + # # lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system + # # lcov --list coverage.info #debug info - # # Uploading report to CodeCov - # - name: Upload code coverage - # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} - # uses: codecov/codecov-action@v4 - # with: - # files: coverage.info - # name: codecov-osrm-backend - # fail_ci_if_error: true - # verbose: true - - name: Build Node package - if: ${{ matrix.build_node_package }} - run: ./scripts/ci/node_package.sh - - name: Publish Node package - if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }} - uses: ncipollo/release-action@v1 - with: - allowUpdates: true - artifactErrorsFailBuild: true - artifacts: build/stage/**/*.tar.gz - omitBody: true - omitBodyDuringUpdate: true - omitName: true - omitNameDuringUpdate: true - replacesArtifacts: true - token: ${{ secrets.GITHUB_TOKEN }} - - name: Show CCache statistics - run: | - ccache -p - ccache -s + # # # Uploading report to CodeCov + # # - name: Upload code coverage + # # if: ${{ matrix.ENABLE_COVERAGE == 'ON' }} + # # uses: codecov/codecov-action@v4 + # # with: + # # files: coverage.info + # # name: codecov-osrm-backend + # # fail_ci_if_error: true + # # verbose: true + # - name: Build Node package + # if: ${{ matrix.build_node_package }} + # run: ./scripts/ci/node_package.sh + # - name: Publish Node package + # if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }} + # uses: ncipollo/release-action@v1 + # with: + # allowUpdates: true + # artifactErrorsFailBuild: true + # artifacts: build/stage/**/*.tar.gz + # omitBody: true + # omitBodyDuringUpdate: true + # omitName: true + # omitNameDuringUpdate: true + # replacesArtifacts: true + # token: ${{ secrets.GITHUB_TOKEN }} + # - name: Show CCache statistics + # run: | + # ccache -p + # ccache -s benchmarks: if: github.event_name == 'pull_request' - needs: [format-taginfo-docs] + # needs: [format-taginfo-docs] runs-on: self-hosted env: CCOMPILER: clang-16 @@ -731,9 +731,13 @@ jobs: sudo umount ~/benchmarks | true rm -rf ~/benchmarks mkdir -p ~/benchmarks + + SHIELD=bench + sudo cset shield --reset | true # see https://llvm.org/docs/Benchmarking.html - name: Run PR Benchmarks run: | + SHIELD=bench sudo cset shield -c 2-3 -k on sudo mount -t tmpfs -o size=4g none ~/benchmarks cp -rf pr/build ~/benchmarks/build @@ -744,9 +748,13 @@ jobs: sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv sudo umount ~/benchmarks + + sleep 5 + # sudo cset shield --move --cpu=0-1 sudo cset shield --reset - name: Run Base Benchmarks run: | + SHIELD=bench sudo cset shield -c 2-3 -k on sudo mount -t tmpfs -o size=4g none ~/benchmarks cp -rf base/build ~/benchmarks/build @@ -760,8 +768,11 @@ jobs: cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json fi # we intentionally use scripts from PR branch to be able to update them and see results in the same PR - sudo cset shield --exec -- cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv + sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv sudo umount ~/benchmarks + + sleep 5 + # sudo cset shield --move --cpu=0-1 sudo cset shield 
--reset - name: Post Benchmark Results run: | @@ -771,9 +782,9 @@ jobs: ccache -p ccache -s - ci-complete: - runs-on: ubuntu-22.04 - needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] - steps: - - run: echo "CI complete" + # ci-complete: + # runs-on: ubuntu-22.04 + # needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks] + # steps: + # - run: echo "CI complete" diff --git a/include/engine/routing_algorithms/routing_base_ch.hpp b/include/engine/routing_algorithms/routing_base_ch.hpp index e2e3b54006..f6872293c3 100644 --- a/include/engine/routing_algorithms/routing_base_ch.hpp +++ b/include/engine/routing_algorithms/routing_base_ch.hpp @@ -460,6 +460,19 @@ void search(SearchEngineData &engine_working_data, duration_upper_bound); } +inline std::vector getNetworkDistances( + SearchEngineData &, + const DataFacade &, + SearchEngineData::QueryHeap &, + const std::vector::QueryHeap>> &, + const PhantomNode &, + const std::vector &, + EdgeWeight /*duration_upper_bound*/ = INVALID_EDGE_WEIGHT) +{ + std::vector distances; + return distances; +} + // Requires the heaps for be empty // If heaps should be adjusted to be initialized outside of this function, // the addition of force_step parameters might be required diff --git a/include/engine/routing_algorithms/routing_base_mld.hpp b/include/engine/routing_algorithms/routing_base_mld.hpp index aedbcf75ad..f71f65deb9 100644 --- a/include/engine/routing_algorithms/routing_base_mld.hpp +++ b/include/engine/routing_algorithms/routing_base_mld.hpp @@ -38,10 +38,13 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition, return INVALID_LEVEL_ID; }; - return std::min(std::min(level(source.forward_segment_id, target.forward_segment_id), - level(source.forward_segment_id, target.reverse_segment_id)), - std::min(level(source.reverse_segment_id, target.forward_segment_id), - level(source.reverse_segment_id, target.reverse_segment_id))); + auto res = std::min(std::min(level(source.forward_segment_id, target.forward_segment_id), + level(source.forward_segment_id, target.reverse_segment_id)), + std::min(level(source.reverse_segment_id, target.forward_segment_id), + level(source.reverse_segment_id, target.reverse_segment_id))); + + // std::cerr << "OLD!!! " << (int)res << std::endl; + return res; } template @@ -92,6 +95,7 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition, getNodeQueryLevel(partition, node, source, target)); })); }); + // std::cerr << "NEW " << (int)min_level << std::endl; return min_level; } @@ -140,6 +144,8 @@ inline LevelID getNodeQueryLevel(const MultiLevelPartition &partition, highest_different_level(phantom_node.reverse_segment_id)); return std::min(current_level, highest_level); }); + + // std::cerr << "NEW!!! 
" << (int)node_level << std::endl; return node_level; } @@ -300,7 +306,6 @@ void relaxOutgoingEdges(const DataFacade &facade, const auto &metric = facade.GetCellMetric(); const auto level = getNodeQueryLevel(partition, heapNode.node, args...); - static constexpr auto IS_MAP_MATCHING = std::is_same_v::MapMatchingQueryHeap, Heap>; @@ -457,6 +462,15 @@ void routingStep(const DataFacade &facade, BOOST_ASSERT(!facade.ExcludeNode(heapNode.node)); + if (DIRECTION == FORWARD_DIRECTION) + { + // std::cerr << "FORWARDO " << heapNode.node << std::endl; + } + else + { + // std::cerr << "REVERSEO " << heapNode.node << std::endl; + } + // Upper bound for the path source -> target with // weight(source -> node) = weight weight(to -> target) ≤ reverse_weight // is weight + reverse_weight @@ -644,6 +658,7 @@ searchDistance(SearchEngineData &, auto [middle, _] = *searchResult; + // std::cerr << "old " << middle << std::endl; auto distance = forward_heap.GetData(middle).distance + reverse_heap.GetData(middle).distance; return distance; @@ -763,6 +778,307 @@ double getNetworkDistance(SearchEngineData &engine_working_data, return from_alias(distance); } + +template +std::vector +runSearch2(const DataFacade &facade, + Heap &forward_heap, + const std::vector> &reverse_heap, + size_t candidatesCount, + const std::vector &force_step_nodes, + EdgeWeight weight_upper_bound, + const PhantomEndpointCandidates &candidates) +{ + // if (forward_heap.Empty() || reverse_heap.Empty()) + // { + // return {}; + // } + + // BOOST_ASSERT(!forward_heap.Empty() && forward_heap.MinKey() < INVALID_EDGE_WEIGHT); + // BOOST_ASSERT(!reverse_heap.Empty() && reverse_heap.MinKey() < INVALID_EDGE_WEIGHT); + + std::vector middles; + std::vector weights; + + middles.resize(candidatesCount, SPECIAL_NODEID); + weights.resize(candidatesCount, weight_upper_bound); + + // run two-Target Dijkstra routing step. 
+ EdgeWeight forward_heap_min = forward_heap.MinKey(); + std::vector reverse_heap_mins; + for (size_t i = 0; i < candidatesCount; ++i) + { + reverse_heap_mins.push_back(reverse_heap[i]->MinKey()); + } + + auto shouldContinue = [&]() + { + bool cont = false; + for (size_t i = 0; i < candidatesCount; ++i) + { + if ((forward_heap.Size() + reverse_heap[i]->Size() > 0) && + (forward_heap_min + reverse_heap_mins[i]) < weights[i]) + { + cont = true; + break; + } + } + return cont; + }; + + bool cont = shouldContinue(); + while (cont) + { + if (!forward_heap.Empty()) + { + const auto heapNode = forward_heap.DeleteMinGetHeapNode(); + // std::cerr << "FORWARDN " << heapNode.node << std::endl; + // auto heapNode = routingStep2(facade, forward_heap, args...); + + for (size_t i = 0; i < candidatesCount; ++i) + { + auto &rh = reverse_heap[i]; + const auto reverseHeapNode = rh->GetHeapNodeIfWasInserted(heapNode.node); + if (reverseHeapNode) + { + auto reverse_weight = reverseHeapNode->weight; + auto path_weight = heapNode.weight + reverse_weight; + + if (!shouldForceStep(force_step_nodes, heapNode, *reverseHeapNode) && + (path_weight >= EdgeWeight{0}) && (path_weight < weights[i])) + { + middles[i] = heapNode.node; + weights[i] = path_weight; + + // auto distance = + // forward_heap.GetData(middles[i]).distance + + // reverse_heap[i]->GetData(middles[i]).distance; + // std::cerr << "RFOUNDN " << i <<" " << distance << std::endl; + } + } + } + + relaxOutgoingEdges(facade, forward_heap, heapNode, candidates); + + if (!forward_heap.Empty()) + forward_heap_min = forward_heap.MinKey(); + } + + cont = false; + for (size_t i = 0; i < candidatesCount; ++i) + { + if ((forward_heap.Size() + reverse_heap[i]->Size() > 0) && + (forward_heap_min + reverse_heap_mins[i]) < weights[i]) + { + cont = true; + } + if (!reverse_heap[i]->Empty() && (forward_heap_min + reverse_heap_mins[i]) < weights[i]) + { + const auto heapNode = reverse_heap[i]->DeleteMinGetHeapNode(); + // std::cerr << "REVERSEN " << i << " " << heapNode.node << std::endl; + + const auto reverseHeapNode = forward_heap.GetHeapNodeIfWasInserted(heapNode.node); + if (reverseHeapNode) + { + auto reverse_weight = reverseHeapNode->weight; + auto path_weight = heapNode.weight + reverse_weight; + + if (!shouldForceStep(force_step_nodes, heapNode, *reverseHeapNode) && + (path_weight >= EdgeWeight{0}) && (path_weight < weights[i])) + { + + middles[i] = heapNode.node; + weights[i] = path_weight; + + // auto distance = + // forward_heap.GetData(middles[i]).distance + + // reverse_heap[i]->GetData(middles[i]).distance; + // std::cerr << "FFOUNDN " << i << " " << distance << std::endl; + } + } + relaxOutgoingEdges( + facade, *reverse_heap[i], heapNode, candidates); + + if (!reverse_heap[i]->Empty()) + reverse_heap_mins[i] = reverse_heap[i]->MinKey(); + } + } + }; + +return middles; + // std::vector>> results; + // results.reserve(candidatesCount); + // for (size_t i = 0; i < candidatesCount; ++i) + // { + // if (weights[i] >= weight_upper_bound || SPECIAL_NODEID == middles[i]) + // { + // results.push_back({}); + // } + // else + // { + // results.push_back({{middles[i], weights[i]}}); + // } + // } + // return results; + + // // run two-Target Dijkstra routing step. 
+ // NodeID middle = SPECIAL_NODEID; + // EdgeWeight weight = weight_upper_bound; + // EdgeWeight forward_heap_min = forward_heap.MinKey(); + // EdgeWeight reverse_heap_min = reverse_heap.MinKey(); + // while (forward_heap.Size() + reverse_heap.Size() > 0 && + // forward_heap_min + reverse_heap_min < weight) + // { + // if (!forward_heap.Empty()) + // { + // routingStep( + // facade, forward_heap, reverse_heap, middle, weight, force_step_nodes, args...); + // if (!forward_heap.Empty()) + // forward_heap_min = forward_heap.MinKey(); + // } + // if (!reverse_heap.Empty()) + // { + // routingStep( + // facade, reverse_heap, forward_heap, middle, weight, force_step_nodes, args...); + // if (!reverse_heap.Empty()) + // reverse_heap_min = reverse_heap.MinKey(); + // } + // }; + + // // No path found for both target nodes? + // if (weight >= weight_upper_bound || SPECIAL_NODEID == middle) + // { + // return {}; + // } + + // return {{middle, weight}}; +} + +template +std::vector searchDistance2( + SearchEngineData &, + const DataFacade &facade, + typename SearchEngineData::MapMatchingQueryHeap &forward_heap, + const std::vector::MapMatchingQueryHeap>> + &reverse_heaps, + size_t candidatesCount, + const std::vector &force_step_nodes, + EdgeWeight weight_upper_bound, + const PhantomEndpointCandidates &candidates) +{ + auto searchResults = runSearch2(facade, + forward_heap, + reverse_heaps, + candidatesCount, + force_step_nodes, + weight_upper_bound, + candidates); + std::vector res; + res.reserve(candidatesCount); + for (size_t i = 0; i < searchResults.size(); ++i) + { + if (searchResults[i] == SPECIAL_NODEID) + { + res.push_back(std::numeric_limits::max()); + } + else + { + auto middle = searchResults[i]; + + // std::cerr << "new " << i << " " << middle << std::endl; + + auto distance = + forward_heap.GetData(middle).distance + reverse_heaps[i]->GetData(middle).distance; + res.push_back(from_alias(distance)); + } + } + return res; +} +template +std::vector getNetworkDistances( + SearchEngineData &engine_working_data, + const DataFacade &facade, + typename SearchEngineData::MapMatchingQueryHeap &forward_heap, + const std::vector::MapMatchingQueryHeap>> + &reverse_heaps, + const PhantomNode &source_phantom, + const std::vector &target_phantoms, + EdgeWeight weight_upper_bound = INVALID_EDGE_WEIGHT) +{ + forward_heap.Clear(); + for (const auto &heap : reverse_heaps) + { + heap->Clear(); + } + // std::vector> reverse_heaps; + // const auto nodes_number = facade.GetNumberOfNodes(); + // const auto border_nodes_number = facade.GetMaxBorderNodeID() + 1; + // for (const auto &target_phantom : target_phantoms) + // { + // (void)target_phantom; + // reverse_heaps.emplace_back(std::make_unique(nodes_number, border_nodes_number)); + // } + + if (source_phantom.IsValidForwardSource()) + { + forward_heap.Insert(source_phantom.forward_segment_id.id, + EdgeWeight{0} - source_phantom.GetForwardWeightPlusOffset(), + {source_phantom.forward_segment_id.id, + false, + EdgeDistance{0} - source_phantom.GetForwardDistance()}); + } + + if (source_phantom.IsValidReverseSource()) + { + forward_heap.Insert(source_phantom.reverse_segment_id.id, + EdgeWeight{0} - source_phantom.GetReverseWeightPlusOffset(), + {source_phantom.reverse_segment_id.id, + false, + EdgeDistance{0} - source_phantom.GetReverseDistance()}); + } + + for (size_t i = 0; i < target_phantoms.size(); ++i) + { + auto &reverse_heap = *reverse_heaps[i]; + const auto &target_phantom = target_phantoms[i]; + if (target_phantom.IsValidForwardTarget()) + { + 
reverse_heap.Insert( + target_phantom.forward_segment_id.id, + target_phantom.GetForwardWeightPlusOffset(), + {target_phantom.forward_segment_id.id, false, target_phantom.GetForwardDistance()}); + } + + if (target_phantom.IsValidReverseTarget()) + { + reverse_heap.Insert( + target_phantom.reverse_segment_id.id, + target_phantom.GetReverseWeightPlusOffset(), + {target_phantom.reverse_segment_id.id, false, target_phantom.GetReverseDistance()}); + } + } + + // PhantomEndpoints endpoints{}; + // endpoints.push_back(source_phantom); + // for (const auto &target_phantom : target_phantoms) + // { + // endpoints.push_back(target_phantom); + // } + std::vector source_phantomes; + source_phantomes.push_back(source_phantom); + PhantomEndpointCandidates phantom_candidates{source_phantomes, target_phantoms}; + + auto distances = searchDistance2(engine_working_data, + facade, + forward_heap, + reverse_heaps, + target_phantoms.size(), + {}, + weight_upper_bound, + phantom_candidates); + return distances; +} + } // namespace osrm::engine::routing_algorithms::mld #endif // OSRM_ENGINE_ROUTING_BASE_MLD_HPP diff --git a/include/engine/search_engine_data.hpp b/include/engine/search_engine_data.hpp index 30d8e7c192..94126f944f 100644 --- a/include/engine/search_engine_data.hpp +++ b/include/engine/search_engine_data.hpp @@ -56,6 +56,7 @@ template <> struct SearchEngineData static thread_local ManyToManyHeapPtr many_to_many_heap; static thread_local SearchEngineHeapPtr map_matching_forward_heap_1; static thread_local SearchEngineHeapPtr map_matching_reverse_heap_1; + static thread_local std::vector map_matching_reverse_heaps; void InitializeOrClearMapMatchingThreadLocalStorage(unsigned number_of_nodes); @@ -133,13 +134,15 @@ template <> struct SearchEngineData static thread_local SearchEngineHeapPtr reverse_heap_1; static thread_local MapMatchingHeapPtr map_matching_forward_heap_1; static thread_local MapMatchingHeapPtr map_matching_reverse_heap_1; + static thread_local std::vector map_matching_reverse_heaps; static thread_local ManyToManyHeapPtr many_to_many_heap; void InitializeOrClearFirstThreadLocalStorage(unsigned number_of_nodes, unsigned number_of_boundary_nodes); void InitializeOrClearMapMatchingThreadLocalStorage(unsigned number_of_nodes, - unsigned number_of_boundary_nodes); + unsigned number_of_boundary_nodes, + size_t max_candidates); void InitializeOrClearManyToManyThreadLocalStorage(unsigned number_of_nodes, unsigned number_of_boundary_nodes); diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh index 5b092471d3..81dee56882 100755 --- a/scripts/ci/run_benchmarks.sh +++ b/scripts/ci/run_benchmarks.sh @@ -50,26 +50,27 @@ function measure_peak_ram_and_time { } function run_benchmarks_for_folder { + rm -rf $RESULTS_FOLDER mkdir -p $RESULTS_FOLDER BENCHMARKS_FOLDER="$BINARIES_FOLDER/src/benchmarks" - echo "Running match-bench MLD" - $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench" - echo "Running match-bench CH" - $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench" - echo "Running route-bench MLD" - $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/route_mld.bench" - echo "Running route-bench CH" - $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench" - echo "Running alias" - $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench" - echo "Running json-render-bench" - 
diff --git a/include/engine/search_engine_data.hpp b/include/engine/search_engine_data.hpp
index 30d8e7c192..94126f944f 100644
--- a/include/engine/search_engine_data.hpp
+++ b/include/engine/search_engine_data.hpp
@@ -56,6 +56,7 @@ template <> struct SearchEngineData<routing_algorithms::ch::Algorithm>
     static thread_local ManyToManyHeapPtr many_to_many_heap;
     static thread_local SearchEngineHeapPtr map_matching_forward_heap_1;
     static thread_local SearchEngineHeapPtr map_matching_reverse_heap_1;
+    static thread_local std::vector<SearchEngineHeapPtr> map_matching_reverse_heaps;

     void InitializeOrClearMapMatchingThreadLocalStorage(unsigned number_of_nodes);

@@ -133,13 +134,15 @@ template <> struct SearchEngineData<routing_algorithms::mld::Algorithm>
     static thread_local SearchEngineHeapPtr reverse_heap_1;
     static thread_local MapMatchingHeapPtr map_matching_forward_heap_1;
     static thread_local MapMatchingHeapPtr map_matching_reverse_heap_1;
+    static thread_local std::vector<MapMatchingHeapPtr> map_matching_reverse_heaps;
     static thread_local ManyToManyHeapPtr many_to_many_heap;

     void InitializeOrClearFirstThreadLocalStorage(unsigned number_of_nodes,
                                                   unsigned number_of_boundary_nodes);
     void InitializeOrClearMapMatchingThreadLocalStorage(unsigned number_of_nodes,
-                                                        unsigned number_of_boundary_nodes);
+                                                        unsigned number_of_boundary_nodes,
+                                                        size_t max_candidates);

     void InitializeOrClearManyToManyThreadLocalStorage(unsigned number_of_nodes,
                                                        unsigned number_of_boundary_nodes);
diff --git a/scripts/ci/run_benchmarks.sh b/scripts/ci/run_benchmarks.sh
index 5b092471d3..81dee56882 100755
--- a/scripts/ci/run_benchmarks.sh
+++ b/scripts/ci/run_benchmarks.sh
@@ -50,26 +50,27 @@ function measure_peak_ram_and_time {
 }

 function run_benchmarks_for_folder {
+    rm -rf $RESULTS_FOLDER
     mkdir -p $RESULTS_FOLDER

     BENCHMARKS_FOLDER="$BINARIES_FOLDER/src/benchmarks"

-    echo "Running match-bench MLD"
-    $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench"
-    echo "Running match-bench CH"
-    $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench"
-    echo "Running route-bench MLD"
-    $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/route_mld.bench"
-    echo "Running route-bench CH"
-    $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench"
-    echo "Running alias"
-    $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench"
-    echo "Running json-render-bench"
-    $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench"
-    echo "Running packedvector-bench"
-    $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench"
-    echo "Running rtree-bench"
-    $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench"
+    # echo "Running match-bench MLD"
+    # $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/match_mld.bench"
+    # echo "Running match-bench CH"
+    # $BENCHMARKS_FOLDER/match-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/match_ch.bench"
+    # echo "Running route-bench MLD"
+    # $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/mld/monaco.osrm" mld > "$RESULTS_FOLDER/route_mld.bench"
+    # echo "Running route-bench CH"
+    # $BENCHMARKS_FOLDER/route-bench "$FOLDER/test/data/ch/monaco.osrm" ch > "$RESULTS_FOLDER/route_ch.bench"
+    # echo "Running alias"
+    # $BENCHMARKS_FOLDER/alias-bench > "$RESULTS_FOLDER/alias.bench"
+    # echo "Running json-render-bench"
+    # $BENCHMARKS_FOLDER/json-render-bench "$FOLDER/test/data/portugal_to_korea.json" > "$RESULTS_FOLDER/json-render.bench"
+    # echo "Running packedvector-bench"
+    # $BENCHMARKS_FOLDER/packedvector-bench > "$RESULTS_FOLDER/packedvector.bench"
+    # echo "Running rtree-bench"
+    # $BENCHMARKS_FOLDER/rtree-bench "$FOLDER/test/data/monaco.osrm.ramIndex" "$FOLDER/test/data/monaco.osrm.fileIndex" "$FOLDER/test/data/monaco.osrm.nbg_nodes" > "$RESULTS_FOLDER/rtree.bench"

     cp -rf $OSM_PBF $FOLDER/data.osm.pbf

@@ -83,19 +84,19 @@ function run_benchmarks_for_folder {

     measure_peak_ram_and_time "$BINARIES_FOLDER/osrm-contract $FOLDER/data.osrm" "$RESULTS_FOLDER/osrm_contract.bench"

-    for ALGORITHM in ch mld; do
-        for BENCH in nearest table trip route match; do
-            echo "Running node $BENCH $ALGORITHM"
-            START=$(date +%s.%N)
-            node $SCRIPTS_FOLDER/scripts/ci/bench.js $FOLDER/lib/binding/node_osrm.node $FOLDER/data.osrm $ALGORITHM $BENCH $GPS_TRACES > "$RESULTS_FOLDER/node_${BENCH}_${ALGORITHM}.bench" 5
-            END=$(date +%s.%N)
-            DIFF=$(echo "$END - $START" | bc)
-            echo "Took: ${DIFF}s"
-        done
-    done
-
-    for ALGORITHM in ch mld; do
-        for BENCH in nearest table trip route match; do
+    # for ALGORITHM in ch mld; do
+    #     for BENCH in nearest table trip route match; do
+    #         echo "Running node $BENCH $ALGORITHM"
+    #         START=$(date +%s.%N)
+    #         node $SCRIPTS_FOLDER/scripts/ci/bench.js $FOLDER/lib/binding/node_osrm.node $FOLDER/data.osrm $ALGORITHM $BENCH $GPS_TRACES > "$RESULTS_FOLDER/node_${BENCH}_${ALGORITHM}.bench" 5
+    #         END=$(date +%s.%N)
+    #         DIFF=$(echo "$END - $START" | bc)
+    #         echo "Took: ${DIFF}s"
+    #     done
+    # done
+
+    for ALGORITHM in mld; do
+        for BENCH in match; do
             echo "Running random $BENCH $ALGORITHM"
             START=$(date +%s.%N)
             $BENCHMARKS_FOLDER/bench "$FOLDER/data.osrm" $ALGORITHM $GPS_TRACES ${BENCH} > "$RESULTS_FOLDER/random_${BENCH}_${ALGORITHM}.bench" 5 || true
@@ -106,28 +107,28 @@ function run_benchmarks_for_folder {
     done

-    for ALGORITHM in ch mld; do
-        $BINARIES_FOLDER/osrm-routed --algorithm $ALGORITHM $FOLDER/data.osrm > /dev/null 2>&1 &
-        OSRM_ROUTED_PID=$!
-
-        # wait for osrm-routed to start
-        if ! curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" > /dev/null 2>&1; then
-            echo "osrm-routed failed to start for algorithm $ALGORITHM"
-            kill -9 $OSRM_ROUTED_PID
-            continue
-        fi
-
-        for METHOD in route nearest trip table match; do
-            echo "Running e2e benchmark for $METHOD $ALGORITHM"
-            START=$(date +%s.%N)
-            python3 $SCRIPTS_FOLDER/scripts/ci/e2e_benchmark.py --host http://localhost:5000 --method $METHOD --iterations 5 --num_requests 1000 --gps_traces_file_path $GPS_TRACES > $RESULTS_FOLDER/e2e_${METHOD}_${ALGORITHM}.bench
-            END=$(date +%s.%N)
-            DIFF=$(echo "$END - $START" | bc)
-            echo "Took: ${DIFF}s"
-        done
-
-        kill -9 $OSRM_ROUTED_PID
-    done
+    # for ALGORITHM in ch mld; do
+    #     $BINARIES_FOLDER/osrm-routed --algorithm $ALGORITHM $FOLDER/data.osrm > /dev/null 2>&1 &
+    #     OSRM_ROUTED_PID=$!
+
+    #     # wait for osrm-routed to start
+    #     if ! curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true" > /dev/null 2>&1; then
+    #         echo "osrm-routed failed to start for algorithm $ALGORITHM"
+    #         kill -9 $OSRM_ROUTED_PID
+    #         continue
+    #     fi
+
+    #     for METHOD in route nearest trip table match; do
+    #         echo "Running e2e benchmark for $METHOD $ALGORITHM"
+    #         START=$(date +%s.%N)
+    #         python3 $SCRIPTS_FOLDER/scripts/ci/e2e_benchmark.py --host http://localhost:5000 --method $METHOD --iterations 5 --num_requests 1000 --gps_traces_file_path $GPS_TRACES > $RESULTS_FOLDER/e2e_${METHOD}_${ALGORITHM}.bench
+    #         END=$(date +%s.%N)
+    #         DIFF=$(echo "$END - $START" | bc)
+    #         echo "Took: ${DIFF}s"
+    #     done
+
+    #     kill -9 $OSRM_ROUTED_PID
+    # done
 }

 run_benchmarks_for_folder

diff --git a/src/engine/routing_algorithms/map_matching.cpp b/src/engine/routing_algorithms/map_matching.cpp
index 45133551ef..481cf53092 100644
--- a/src/engine/routing_algorithms/map_matching.cpp
+++ b/src/engine/routing_algorithms/map_matching.cpp
@@ -45,7 +45,8 @@ unsigned getMedianSampleTime(const std::vector<unsigned> &timestamps)

 template <typename Algorithm>
 inline void initializeHeap(SearchEngineData<Algorithm> &engine_working_data,
-                           const DataFacade<Algorithm> &facade)
+                           const DataFacade<Algorithm> &facade,
+                           size_t)
 {
     const auto nodes_number = facade.GetNumberOfNodes();

@@ -54,14 +55,92 @@ inline void initializeHeap(SearchEngineData<Algorithm> &engine_working_data,

 template <>
 inline void initializeHeap(SearchEngineData<mld::Algorithm> &engine_working_data,
-                           const DataFacade<mld::Algorithm> &facade)
+                           const DataFacade<mld::Algorithm> &facade,
+                           size_t max_candidates)
 {
     const auto nodes_number = facade.GetNumberOfNodes();
     const auto border_nodes_number = facade.GetMaxBorderNodeID() + 1;
-    engine_working_data.InitializeOrClearMapMatchingThreadLocalStorage(nodes_number,
-                                                                       border_nodes_number);
+    engine_working_data.InitializeOrClearMapMatchingThreadLocalStorage(
+        nodes_number, border_nodes_number, max_candidates);
 }
+
+#include <fstream>
+#include <iostream>
+
+template <typename T> void saveVectorToFile(const std::vector<T> &data, const std::string &filename)
+{
+    std::ofstream outFile(filename, std::ios::binary);
+    if (!outFile)
+    {
+        std::cerr << "Error opening file for writing: " << filename << std::endl;
+        return;
+    }
+    size_t size = data.size();
+    outFile.write(reinterpret_cast<const char *>(&size), sizeof(size));
+    outFile.write(reinterpret_cast<const char *>(data.data()), size * sizeof(T));
+    outFile.close();
+    if (!outFile.good())
+    {
+        std::cerr << "Error occurred at writing time!" << std::endl;
+    }
+}
+
+template <typename T> bool loadVectorFromFile(std::vector<T> &data, const std::string &filename)
+{
+    std::ifstream inFile(filename, std::ios::binary);
+    if (!inFile)
+    {
+        std::cerr << "Error opening file for reading: " << filename << std::endl;
+        return false;
+    }
+    size_t size;
+    inFile.read(reinterpret_cast<char *>(&size), sizeof(size));
+    data.resize(size);
+    inFile.read(reinterpret_cast<char *>(data.data()), size * sizeof(T));
+    inFile.close();
+    if (!inFile.good())
+    {
+        std::cerr << "Error occurred at reading time!" << std::endl;
+        return false;
+    }
+    return true;
+}
+
+template <typename T> void saveStructToFile(const T &data, const std::string &filename)
+{
+    std::ofstream outFile(filename, std::ios::binary);
+    if (!outFile)
+    {
+        std::cerr << "Error opening file for writing: " << filename << std::endl;
+        return;
+    }
+    outFile.write(reinterpret_cast<const char *>(&data), sizeof(T));
+    outFile.close();
+    if (!outFile.good())
+    {
+        std::cerr << "Error occurred at writing time!" << std::endl;
+    }
+}
+
+template <typename T> bool loadStructFromFile(T &data, const std::string &filename)
+{
+    std::ifstream inFile(filename, std::ios::binary);
+    if (!inFile)
+    {
+        std::cerr << "Error opening file for reading: " << filename << std::endl;
+        return false;
+    }
+    inFile.read(reinterpret_cast<char *>(&data), sizeof(T));
+    inFile.close();
+    if (!inFile.good())
+    {
+        std::cerr << "Error occurred at reading time!" << std::endl;
+        return false;
+    }
+    return true;
+}
+
 } // namespace

 template <typename Algorithm>
@@ -144,9 +223,16 @@ SubMatchingList mapMatching(SearchEngineData<Algorithm> &engine_working_data,
         return sub_matchings;
     }

-    initializeHeap(engine_working_data, facade);
+    size_t max_candidates = 0;
+    for (const auto &candidates : candidates_list)
+    {
+        max_candidates = std::max(max_candidates, candidates.size());
+    }
+
+    initializeHeap(engine_working_data, facade, max_candidates);
     auto &forward_heap = *engine_working_data.map_matching_forward_heap_1;
     auto &reverse_heap = *engine_working_data.map_matching_reverse_heap_1;
+    const auto &reverse_heaps = engine_working_data.map_matching_reverse_heaps;

     std::size_t breakage_begin = map_matching::INVALID_STATE;
     std::vector<std::size_t> split_points;
@@ -225,6 +311,19 @@ SubMatchingList mapMatching(SearchEngineData<Algorithm> &engine_working_data,
                 continue;
             }

+            // PhantomNode source;
+            // loadStructFromFile(source, "source.bin");
+            std::vector<PhantomNode> target_phantom_nodes;
+            // loadVectorFromFile(target_phantom_nodes, "target.bin");
+            // target_phantom_nodes.erase(target_phantom_nodes.begin());
+            // target_phantom_nodes.erase(target_phantom_nodes.begin());
+            // target_phantom_nodes.erase(target_phantom_nodes.begin());
+            // target_phantom_nodes.erase(target_phantom_nodes.begin());
+            // target_phantom_nodes.pop_back();
+            // target_phantom_nodes.pop_back();
+            // target_phantom_nodes.erase(target_phantom_nodes.begin() + 1);
+
+            // target_phantom_nodes.push_back(target);
             for (const auto s_prime : util::irange(0UL, current_viterbi.size()))
             {
                 const double emission_pr = emission_log_probabilities[t][s_prime];
@@ -233,15 +332,106 @@ SubMatchingList mapMatching(SearchEngineData<Algorithm> &engine_working_data,
                 {
                     continue;
                 }
+                target_phantom_nodes.push_back(current_timestamps_list[s_prime].phantom_node);
+            }
+
+// TIMER_START(NEW_DIST);
+
+#define MODE 1
+
+#if MODE == 0
+            auto new_distances =
+                getNetworkDistances(engine_working_data,
+                                    facade,
+                                    forward_heap,
+                                    reverse_heaps,
+                                    prev_unbroken_timestamps_list[s].phantom_node,
+                                    target_phantom_nodes,
+                                    weight_upper_bound);
+            std::vector<double> old_distances;
+
+            for (const auto &pn : target_phantom_nodes)
+            {
+                double network_distance =
+                    getNetworkDistance(engine_working_data,
+                                       facade,
+                                       forward_heap,
+                                       reverse_heap,
+                                       prev_unbroken_timestamps_list[s].phantom_node,
+                                       pn,
+                                       weight_upper_bound);
+                old_distances.push_back(network_distance);
+            }
+
+            for (size_t i = 0; i < old_distances.size(); ++i)
+            {
+                if (std::abs(old_distances[i] - new_distances[i]) > 0.01)
+                {
+                    // saveStructToFile(prev_unbroken_timestamps_list[s].phantom_node,
+                    // "source.bin");
+                    // saveVectorToFile(target_phantom_nodes, "target.bin");
+                    std::cerr << "OOPS " << old_distances[i] << " " << new_distances[i]
+                              << std::endl;
+                    // std::exit(1);
+                }
+            }
+            auto distances = old_distances;
+#elif MODE == 1
+            (void)reverse_heap;
+            auto distances =
+                getNetworkDistances(engine_working_data,
+                                    facade,
+                                    forward_heap,
+                                    reverse_heaps,
+                                    prev_unbroken_timestamps_list[s].phantom_node,
+                                    target_phantom_nodes,
+                                    weight_upper_bound);
+            // TIMER_STOP(NEW_DIST);
+#else
+            // TIMER_START(OLD_DIST);
+            (void)reverse_heaps;
+            std::vector<double> distances;
+
+            for (const auto &pn : target_phantom_nodes)
+            {
                 double network_distance =
                     getNetworkDistance(engine_working_data,
                                        facade,
                                        forward_heap,
                                        reverse_heap,
                                        prev_unbroken_timestamps_list[s].phantom_node,
-                                       current_timestamps_list[s_prime].phantom_node,
+                                       pn,
                                        weight_upper_bound);
+                distances.push_back(network_distance);
+            }
+#endif
+            // TIMER_STOP(OLD_DIST);
+
+            // std::cerr << "Old: " << TIMER_MSEC(OLD_DIST) << " New: " << TIMER_MSEC(NEW_DIST)
+            //           << std::endl;
+
+
+            size_t distance_index = 0;
+            for (const auto s_prime : util::irange(0UL, current_viterbi.size()))
+            {
+                const double emission_pr = emission_log_probabilities[t][s_prime];
+                double new_value = prev_viterbi[s] + emission_pr;
+                if (current_viterbi[s_prime] > new_value)
+                {
+                    continue;
+                }
+
+                double network_distance = distances[distance_index];
+                ++distance_index;
+                // double network_distance =
+                //     getNetworkDistance(engine_working_data,
+                //                        facade,
+                //                        forward_heap,
+                //                        reverse_heap,
+                //                        prev_unbroken_timestamps_list[s].phantom_node,
+                //                        current_timestamps_list[s_prime].phantom_node,
+                //                        weight_upper_bound);

                 // get distance diff between loc1/2 and locs/s_prime
                 const auto d_t = std::abs(network_distance - haversine_distance);
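A note on the block above: MODE 0 runs both implementations and prints "OOPS" whenever the old per-candidate distance and the new batched distance differ by more than 0.01, MODE 1 compiles only the new batched search, and any other value falls back to the old per-candidate search. Whichever branch is compiled, the distances are consumed by a second pass over the candidates that has to apply the same pruning test, in the same order, as the pass that filled target_phantom_nodes, or distance_index drifts out of step. A condensed sketch of that invariant, using hypothetical helpers (passes_pruning, candidate, use) rather than the patch's own code:

    // Sketch only, not part of the patch: both passes must filter identically,
    // otherwise distances[] and the surviving candidates go out of sync.
    std::vector<PhantomNode> targets;
    for (auto s_prime : candidate_indices)
        if (passes_pruning(s_prime))              // pass 1: collect targets
            targets.push_back(candidate(s_prime).phantom_node);

    const auto distances = getNetworkDistances(/* one-to-many search over targets */);

    size_t distance_index = 0;
    for (auto s_prime : candidate_indices)
        if (passes_pruning(s_prime))              // pass 2: consume in the same order
            use(s_prime, distances[distance_index++]);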
diff --git a/src/engine/search_engine_data.cpp b/src/engine/search_engine_data.cpp
index e749db6c46..5392a0f287 100644
--- a/src/engine/search_engine_data.cpp
+++ b/src/engine/search_engine_data.cpp
@@ -15,6 +15,8 @@ thread_local SearchEngineData<CH>::SearchEngineHeapPtr
     SearchEngineData<CH>::map_matching_forward_heap_1;
 thread_local SearchEngineData<CH>::SearchEngineHeapPtr
     SearchEngineData<CH>::map_matching_reverse_heap_1;
+thread_local std::vector<SearchEngineData<CH>::SearchEngineHeapPtr>
+    SearchEngineData<CH>::map_matching_reverse_heaps;

 thread_local SearchEngineData<CH>::ManyToManyHeapPtr SearchEngineData<CH>::many_to_many_heap;

@@ -123,9 +125,11 @@ thread_local SearchEngineData<MLD>::MapMatchingHeapPtr
 thread_local SearchEngineData<MLD>::MapMatchingHeapPtr
     SearchEngineData<MLD>::map_matching_reverse_heap_1;
 thread_local SearchEngineData<MLD>::ManyToManyHeapPtr SearchEngineData<MLD>::many_to_many_heap;
+thread_local std::vector<SearchEngineData<MLD>::MapMatchingHeapPtr>
+    SearchEngineData<MLD>::map_matching_reverse_heaps;

 void SearchEngineData<MLD>::InitializeOrClearMapMatchingThreadLocalStorage(
-    unsigned number_of_nodes, unsigned number_of_boundary_nodes)
+    unsigned number_of_nodes, unsigned number_of_boundary_nodes, size_t max_candidates)
 {
     if (map_matching_forward_heap_1.get())
     {
@@ -146,6 +150,16 @@ void SearchEngineData<MLD>::InitializeOrClearMapMatchingThreadLocalStorage(
         map_matching_reverse_heap_1.reset(
             new MapMatchingQueryHeap(number_of_nodes, number_of_boundary_nodes));
     }
+
+    if (max_candidates > map_matching_reverse_heaps.size())
+    {
+        size_t to_add = max_candidates - map_matching_reverse_heaps.size();
+        for (unsigned i = 0; i < to_add; ++i)
+        {
+            map_matching_reverse_heaps.emplace_back(
+                new MapMatchingQueryHeap(number_of_nodes, number_of_boundary_nodes));
+        }
+    }
 }

 void SearchEngineData<MLD>::InitializeOrClearFirstThreadLocalStorage(