diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 0000000..de7b866 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,86 @@ +# Copyright 2024 Wei Kang (wkang@pku.edu.cn) + +# See ../../LICENSE for clarification regarding multiple authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +name: Deploy site +on: + push: + branches: + - master + +jobs: + + build: + name: Build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: Display Python version + run: python -c "import sys; print(sys.version)" + + - name: Install requirements + shell: bash + run: | + python3 -m pip install -r requirements.txt + + - name: Prepare k2 wheels + shell: bash + run: | + python3 scripts/generate_k2_wheel.py + python3 scripts/generate_resources.py + + - name: Build pages + shell: bash + run: | + mkdocs build -f mkdocs.yml + mkdocs build -f mkdocs-zh-CN.yml + + - name: Deploy pages to k2-fsa.org + if: github.repository_owner == 'k2-fsa' + run: | + user=${{ secrets.K2_USERNAME }} + server=${{ secrets.K2_HOST }} + port=${{ secrets.K2_PORT }} + echo "${{ secrets.K2_KEY }}" > id_rsa && chmod 600 id_rsa + scp -P $port -r -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -i id_rsa site/* $user@$server:~/www + rm id_rsa + + - name: Build pages for kingway.fun/k2 + if: github.repository_owner == 'pkufool' + shell: bash + run: | + sed -i 's/link: \//link: \/k2\//g' mkdocs.yml + sed -i 's/link: \//link: \/k2\//g' mkdocs-zh-CN.yml + mkdocs build -f mkdocs.yml + mkdocs build -f mkdocs-zh-CN.yml + + - name: Deploy pages to kingway.fun/k2 + if: github.repository_owner == 'pkufool' + run: | + user=${{ secrets.USERNAME }} + server=${{ secrets.HOST }} + port=${{ secrets.PORT }} + echo "${{ secrets.KEY }}" > id_rsa && chmod 600 id_rsa + scp -P $port -r -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -i id_rsa site/* $user@$server:~/deploy/k2 + rm id_rsa \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3bbb019 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +.cache +.DS_Store diff --git a/custom/.DS_Store b/custom/.DS_Store new file mode 100644 index 0000000..c328838 Binary files /dev/null and b/custom/.DS_Store differ diff --git a/custom/assets/data/asr_model_cn.json b/custom/assets/data/asr_model_cn.json new file mode 100644 index 0000000..5032117 --- /dev/null +++ b/custom/assets/data/asr_model_cn.json @@ -0,0 +1,29 @@ +[ + { + "language" : "中英文", + "mode" : "流式", + "engine" : "onnx", + "encoder" : "zipformer", + "type": "RNNT", + "download" : "wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20.tar.bz2", + "description": "本模型由社区开发者贡献,使用了数万小时的内部商用数据,模型参数量约为 
80M。\n使用方法:\nhttps://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-transducer/zipformer-transducer-models.html#csukuangfj-sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20-bilingual-chinese-english" + }, + { + "language" : "中英文", + "mode" : "流式", + "engine" : "torch", + "encoder" : "zipformer", + "type": "RNNT", + "download" : "git lfs install\ngit clone https://huggingface.co/pfluo/k2fsa-zipformer-chinese-english-mixed", + "description": "本模型由社区开发者贡献,使用了数万小时的内部商用数据,模型参数量约为 80M。\n上述地址包含了可用于 sherpa 推理的 torch script 模型,也包含可用于导出 onnx 和 ncnn 的所有文件。" + }, + { + "language" : "中文", + "mode" : "流式", + "engine" : "onnx", + "encoder" : "zipformer", + "type": "RNNT", + "download" : "GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/csukuangfj/sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23\n\ncd sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23\n\ngit lfs pull --include \".*onnx\"", + "description": "本模型使用 WenetSpeech L 训练,参数规模为 14M。\n使用方法:\nhttps://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-transducer/zipformer-transducer-models.html#csukuangfj-sherpa-onnx-streaming-zipformer-zh-14m-2023-02-23-chinese" + } +] diff --git a/custom/assets/data/asr_model_en.json b/custom/assets/data/asr_model_en.json new file mode 100644 index 0000000..fbece88 --- /dev/null +++ b/custom/assets/data/asr_model_en.json @@ -0,0 +1,20 @@ +[ + { + "language" : "English & Chinese", + "mode" : "Streaming", + "engine" : "onnx", + "encoder" : "zipformer", + "type": "RNNT", + "download" : "wget https://github.com/k2-fsa/sherpa-onnx/releases/download/asr-models/sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20.tar.bz2", + "description": "This model was contributed by community users and trained on 10,000+ hours of in-house data; it has about 80M parameters.\nUsage:\nhttps://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-transducer/zipformer-transducer-models.html#csukuangfj-sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20-bilingual-chinese-english" + }, + { + "language" : "English & Chinese", + "mode" : "Streaming", + "engine" : "torch", + "encoder" : "zipformer", + "type": "RNNT", + "download" : "git lfs install\ngit clone https://huggingface.co/pfluo/k2fsa-zipformer-chinese-english-mixed", + "description": "This model was contributed by community users and trained on 10,000+ hours of in-house data; it has about 80M parameters.\nThe repository above contains torch script models for sherpa inference, as well as all the files needed to export the model to onnx and ncnn."
+ } +] \ No newline at end of file diff --git a/custom/assets/data/k2_whl_cn.json b/custom/assets/data/k2_whl_cn.json new file mode 100644 index 0000000..d5082d4 --- /dev/null +++ b/custom/assets/data/k2_whl_cn.json @@ -0,0 +1,1311 @@ +[ + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.3.0", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.3.0.dev20240229 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240301+cuda12.1.torch2.3.0.dev20240229 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.3.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.3.0.dev20240229 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240301+cuda11.8.torch2.3.0.dev20240229 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.1", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240301+cuda12.1.torch2.2.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.2.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.2.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.1.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.1.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.1.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# 
Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.2.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.2.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.1.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.1.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.1.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip 
install k2==1.24.4.dev20240223+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": 
"1.24.4", + "os": "Macos", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.2.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.0", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.2.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.2.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.2", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.1.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.2", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.1.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.1", + "platform": 
"CUDA121", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.1.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.1.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.0", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.1.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.1.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.0.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch2.0.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install 
Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.9.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.9.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.9.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.9.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.8.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.8.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.8.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.0 
--index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.8.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.7.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.7.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.6.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.6.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url 
https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.3.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.3.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA115", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu115\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.5.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url 
https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.3.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.10.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.10.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.10.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231220+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231220+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install 
k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " 
+ }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.11.0", + "platform": "CPU", + 
"install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # 
Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda11.7.torch2.0.1 -f 
https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.9.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.9.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.8.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.8.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.7.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + 
"os": "Linux", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.7.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.7.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.6.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.6.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda11.7.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda11.6.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CPU", + 
"install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.7.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.6.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.6.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.3.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.6.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.3.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA115", + 
"install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu115\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.5.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.3.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.10.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.10.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.10.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.3.dev20230718+cuda11.8.torch2.0.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.3.dev20230718+cuda11.8.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230718+cuda11.7.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + } +] \ No newline at end of file diff --git a/custom/assets/data/k2_whl_en.json b/custom/assets/data/k2_whl_en.json new file mode 100644 index 0000000..d5082d4 --- /dev/null +++ b/custom/assets/data/k2_whl_en.json @@ -0,0 +1,1311 @@ +[ + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.3.0", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.3.0.dev20240229 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240301+cuda12.1.torch2.3.0.dev20240229 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.3.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install 
torch==2.3.0.dev20240229 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240301+cuda11.8.torch2.3.0.dev20240229 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.1", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240301+cuda12.1.torch2.2.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.2.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.2.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.1.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.1.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.1.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url 
https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.2.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.2.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.1.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.1.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.1.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.1 -f 
https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.1", 
+ "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.2.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.2.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.2.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.0", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.2.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.2.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.2.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.2.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.2", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.1.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.2", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.1.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.1", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.1.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.1.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.1.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.0", + "platform": "CPU", + "install": "# Install 
Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.1.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.0", + "platform": "CUDA121", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cu121\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda12.1.torch2.1.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.1.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.1.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.1.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.0.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch2.0.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.8.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.9.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.1 
--index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.9.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.9.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.9.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.8.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.8.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CUDA111", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cu111\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.1.torch1.8.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.8.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url 
https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.7.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.7.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.7.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.6.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.6.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.7.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url 
https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.3.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.6.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.3.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA115", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu115\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.5.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda11.3.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 
--index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.10.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.10.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.4.dev20240223+cuda10.2.torch1.10.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.4", + "os": "Macos", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231220+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Linux", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231220+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip 
install k2==1.24.4.dev20231022+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.4", + "os": "Windows", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.4.dev20231022+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { 
+ "build": "1.24.3", + "os": "Windows", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.10.1", + "platform": "CPU", + 
"install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Windows", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n 
pip install k2==1.24.3.dev20230726+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Macos", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda11.7.torch2.0.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch2.0.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, 
+ { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.9.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.9.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.9.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.9.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.9.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.8.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.8.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.8.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.8.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.8.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.7.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.7.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.7.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.7.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.7.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.7.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.7.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": 
"1.6.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.6.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.6.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.6.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda10.2.torch1.6.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda11.7.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.1 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cuda11.6.torch1.13.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.13.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.12.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.11.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.2 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CPU", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.1 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CPU", + "install": "# Install Pytorch\n pip 
install torch==1.10.0 --index-url https://download.pytorch.org/whl/cpu\n\n # Install k2\n pip install k2==1.24.3.dev20230726+cpu.torch1.10.0 -f https://k2-fsa.github.io/k2/cpu.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.7.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.13.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.13.0 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.6.torch1.13.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.6.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.3.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.12.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA116", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu116\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.6.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.3.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.12.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.12.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.12.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA115", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu115\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.5.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA113", + "install": "# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu113\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda11.3.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.11.0", + "platform": "CUDA102", + "install": 
"# Install Pytorch\n pip install torch==1.11.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.11.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.2", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.2 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.10.2 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.1", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.1 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.10.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "1.10.0", + "platform": "CUDA102", + "install": "# Install Pytorch\n pip install torch==1.10.0 --index-url https://download.pytorch.org/whl/cu102\n\n # Install k2\n pip install k2==1.24.3.dev20230725+cuda10.2.torch1.10.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.1", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.1 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.3.dev20230718+cuda11.8.torch2.0.1 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA118", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu118\n\n # Install k2\n pip install k2==1.24.3.dev20230718+cuda11.8.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + }, + { + "build": "1.24.3", + "os": "Linux", + "pytorch": "2.0.0", + "platform": "CUDA117", + "install": "# Install Pytorch\n pip install torch==2.0.0 --index-url https://download.pytorch.org/whl/cu117\n\n # Install k2\n pip install k2==1.24.3.dev20230718+cuda11.7.torch2.0.0 -f https://k2-fsa.github.io/k2/cuda.html\n " + } +] \ No newline at end of file diff --git a/custom/assets/images/asr_huggingface.png b/custom/assets/images/asr_huggingface.png new file mode 100644 index 0000000..ade8773 Binary files /dev/null and b/custom/assets/images/asr_huggingface.png differ diff --git a/custom/assets/images/asr_huggingface_en.png b/custom/assets/images/asr_huggingface_en.png new file mode 100644 index 0000000..6245735 Binary files /dev/null and b/custom/assets/images/asr_huggingface_en.png differ diff --git a/custom/assets/images/homepage/blog.jpg b/custom/assets/images/homepage/blog.jpg new file mode 100644 index 0000000..839a1dd Binary files /dev/null and b/custom/assets/images/homepage/blog.jpg differ diff --git a/custom/assets/images/homepage/demo.jpg b/custom/assets/images/homepage/demo.jpg new file mode 100644 index 0000000..52bf12e Binary files /dev/null and b/custom/assets/images/homepage/demo.jpg differ diff --git a/custom/assets/images/homepage/event.jpg b/custom/assets/images/homepage/event.jpg new file mode 100644 index 0000000..abf4349 Binary files /dev/null and b/custom/assets/images/homepage/event.jpg differ diff --git a/custom/assets/images/homepage/get_started.jpg b/custom/assets/images/homepage/get_started.jpg new file mode 100644 index 0000000..dec89c2 Binary files /dev/null and b/custom/assets/images/homepage/get_started.jpg differ diff --git 
a/custom/assets/images/homepage/main.jpg b/custom/assets/images/homepage/main.jpg new file mode 100644 index 0000000..13137ce Binary files /dev/null and b/custom/assets/images/homepage/main.jpg differ diff --git a/custom/assets/images/homepage/message.jpg b/custom/assets/images/homepage/message.jpg new file mode 100644 index 0000000..0ddf94e Binary files /dev/null and b/custom/assets/images/homepage/message.jpg differ diff --git a/custom/assets/images/homepage/model.jpg b/custom/assets/images/homepage/model.jpg new file mode 100644 index 0000000..b08870c Binary files /dev/null and b/custom/assets/images/homepage/model.jpg differ diff --git a/custom/assets/images/homepage/papers.jpg b/custom/assets/images/homepage/papers.jpg new file mode 100644 index 0000000..12d579d Binary files /dev/null and b/custom/assets/images/homepage/papers.jpg differ diff --git a/custom/assets/images/homepage/resources.jpg b/custom/assets/images/homepage/resources.jpg new file mode 100644 index 0000000..a6a28f9 Binary files /dev/null and b/custom/assets/images/homepage/resources.jpg differ diff --git a/custom/assets/images/ngk-matrix.png b/custom/assets/images/ngk-matrix.png new file mode 100644 index 0000000..d408330 Binary files /dev/null and b/custom/assets/images/ngk-matrix.png differ diff --git a/custom/assets/images/zipformer/3360bb95-49f4-47f5-a458-a6edc71081ff.png b/custom/assets/images/zipformer/3360bb95-49f4-47f5-a458-a6edc71081ff.png new file mode 100644 index 0000000..d2ff90c Binary files /dev/null and b/custom/assets/images/zipformer/3360bb95-49f4-47f5-a458-a6edc71081ff.png differ diff --git a/custom/assets/images/zipformer/4441dae6-840f-4a37-b5d6-e8bc69634255.png b/custom/assets/images/zipformer/4441dae6-840f-4a37-b5d6-e8bc69634255.png new file mode 100644 index 0000000..f19703b Binary files /dev/null and b/custom/assets/images/zipformer/4441dae6-840f-4a37-b5d6-e8bc69634255.png differ diff --git a/custom/assets/images/zipformer/47cae332-1c36-4b48-9108-b8b43b052d33.png b/custom/assets/images/zipformer/47cae332-1c36-4b48-9108-b8b43b052d33.png new file mode 100644 index 0000000..0ffd9c2 Binary files /dev/null and b/custom/assets/images/zipformer/47cae332-1c36-4b48-9108-b8b43b052d33.png differ diff --git a/custom/assets/images/zipformer/552dae6e-b5ad-43e3-be81-3d140c0d65d7.png b/custom/assets/images/zipformer/552dae6e-b5ad-43e3-be81-3d140c0d65d7.png new file mode 100644 index 0000000..2f78216 Binary files /dev/null and b/custom/assets/images/zipformer/552dae6e-b5ad-43e3-be81-3d140c0d65d7.png differ diff --git a/custom/assets/images/zipformer/6c783964-2f7a-45cc-b457-079f530445c8.png b/custom/assets/images/zipformer/6c783964-2f7a-45cc-b457-079f530445c8.png new file mode 100644 index 0000000..a5aeb8d Binary files /dev/null and b/custom/assets/images/zipformer/6c783964-2f7a-45cc-b457-079f530445c8.png differ diff --git a/custom/assets/images/zipformer/6f00094f-8afd-478e-8790-94c449866659.png b/custom/assets/images/zipformer/6f00094f-8afd-478e-8790-94c449866659.png new file mode 100644 index 0000000..b73ea36 Binary files /dev/null and b/custom/assets/images/zipformer/6f00094f-8afd-478e-8790-94c449866659.png differ diff --git a/custom/assets/images/zipformer/87b2acaa-d54d-45bb-bd61-add1526b1714.png b/custom/assets/images/zipformer/87b2acaa-d54d-45bb-bd61-add1526b1714.png new file mode 100644 index 0000000..4d2589f Binary files /dev/null and b/custom/assets/images/zipformer/87b2acaa-d54d-45bb-bd61-add1526b1714.png differ diff --git a/custom/assets/images/zipformer/8869d81c-e978-4194-8303-83cb9e687fe8.png 
b/custom/assets/images/zipformer/8869d81c-e978-4194-8303-83cb9e687fe8.png new file mode 100644 index 0000000..7d30e2f Binary files /dev/null and b/custom/assets/images/zipformer/8869d81c-e978-4194-8303-83cb9e687fe8.png differ diff --git a/custom/assets/images/zipformer/98ff57ea-a436-4636-80d4-dd66b271a194.png b/custom/assets/images/zipformer/98ff57ea-a436-4636-80d4-dd66b271a194.png new file mode 100644 index 0000000..fcd7eb3 Binary files /dev/null and b/custom/assets/images/zipformer/98ff57ea-a436-4636-80d4-dd66b271a194.png differ diff --git a/custom/assets/images/zipformer/bf07c0ce-b908-4b0f-97f1-b77204c4b5c6.png b/custom/assets/images/zipformer/bf07c0ce-b908-4b0f-97f1-b77204c4b5c6.png new file mode 100644 index 0000000..738dae2 Binary files /dev/null and b/custom/assets/images/zipformer/bf07c0ce-b908-4b0f-97f1-b77204c4b5c6.png differ diff --git a/custom/assets/images/zipformer/e1a30198-6ee9-43e5-8b54-42c4e83b283a.png b/custom/assets/images/zipformer/e1a30198-6ee9-43e5-8b54-42c4e83b283a.png new file mode 100644 index 0000000..7690eb8 Binary files /dev/null and b/custom/assets/images/zipformer/e1a30198-6ee9-43e5-8b54-42c4e83b283a.png differ diff --git a/custom/assets/images/zipformer/e7955bcd-ce1a-4f1b-89f9-bc2e4b64fce8.png b/custom/assets/images/zipformer/e7955bcd-ce1a-4f1b-89f9-bc2e4b64fce8.png new file mode 100644 index 0000000..0bde22f Binary files /dev/null and b/custom/assets/images/zipformer/e7955bcd-ce1a-4f1b-89f9-bc2e4b64fce8.png differ diff --git a/custom/assets/javascripts/mathjax.js b/custom/assets/javascripts/mathjax.js new file mode 100644 index 0000000..c3a47eb --- /dev/null +++ b/custom/assets/javascripts/mathjax.js @@ -0,0 +1,21 @@ +window.MathJax = { + loader: {load: ['[tex]/boldsymbol']}, + tex: { + packages: {'[+]': ['boldsymbol']}, + inlineMath: [["\\(", "\\)"]], + displayMath: [["\\[", "\\]"]], + processEscapes: true, + processEnvironments: true + }, + options: { + ignoreHtmlClass: ".*|", + processHtmlClass: "arithmatex" + } +}; + +document$.subscribe(() => { + MathJax.startup.output.clearCache() + MathJax.typesetClear() + MathJax.texReset() + MathJax.typesetPromise() +}) \ No newline at end of file diff --git a/custom/assets/javascripts/mdb.min.js b/custom/assets/javascripts/mdb.min.js new file mode 100644 index 0000000..9b324d7 --- /dev/null +++ b/custom/assets/javascripts/mdb.min.js @@ -0,0 +1,20 @@ +/*!
+ * MDB5 + * Version: FREE 6.2.0 + * + * + * Copyright: Material Design for Bootstrap + * https://mdbootstrap.com/ + * + * Read the license: https://mdbootstrap.com/general/license/ + * + * + * Documentation: https://mdbootstrap.com/docs/standard/ + * + * Support: https://mdbootstrap.com/support/ + * + * Contact: contact@mdbootstrap.com + * + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define("mdb",[],e):"object"==typeof exports?exports.mdb=e():t.mdb=e()}(this,function(){return n=[function(t,e,n){"use strict";var i=n(16),o=n(68),r=n(44),s=n(25),a=n(11).f,c=n(110),l=n(113),u=n(24),n=n(8),h="Array Iterator",d=s.set,f=s.getterFor(h),s=(t.exports=c(Array,"Array",function(t,e){d(this,{type:h,target:i(t),index:0,kind:e})},function(){var t=f(this),e=t.target,n=t.kind,i=t.index++;return!e||i>=e.length?(t.target=void 0,l(void 0,!0)):l("keys"==n?i:"values"==n?e[i]:[i,e[i]],!1)},"values"),r.Arguments=r.Array);if(o("keys"),o("values"),o("entries"),!u&&n&&"values"!==s.name)try{a(s,"name",{value:"values"})}catch(t){}},function(t,e,n){function i(e,t){if(e){if(e[u]!==d)try{l(e,u,d)}catch(t){e[u]=d}if(e[h]||l(e,h,t),s[t])for(var n in c)if(e[n]!==c[n])try{l(e,n,c[n])}catch(t){e[n]=c[n]}}}var o,r=n(6),s=n(114),a=n(115),c=n(0),l=n(15),n=n(7),u=n("iterator"),h=n("toStringTag"),d=c.values;for(o in s)i(r[o]&&r[o].prototype,o);i(a,"DOMTokenList")},function(t,e){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,e,n){var n=n(47),i=n.all;t.exports=n.IS_HTMLDDA?function(t){return"function"==typeof t||t===i}:function(t){return"function"==typeof t}},function(t,e,n){var n=n(30),i=Function.prototype,o=i.call,i=n&&i.bind.bind(o,o);t.exports=n?i:function(t){return function(){return o.apply(t,arguments)}}},function(t,e,n){"use strict";var i=n(22),n=n(43);i({target:"RegExp",proto:!0,forced:/./.exec!==n},{exec:n})},function(n,t,e){!function(t){function e(t){return t&&t.Math==Math&&t}n.exports=e("object"==typeof globalThis&&globalThis)||e("object"==typeof window&&window)||e("object"==typeof self&&self)||e("object"==typeof t&&t)||function(){return this}()||Function("return this")()}.call(this,e(72))},function(t,e,n){var i=n(6),o=n(34),r=n(9),s=n(53),a=n(50),n=n(49),c=i.Symbol,l=o("wks"),u=n?c.for||c:c&&c.withoutSetter||s;t.exports=function(t){return r(l,t)||(l[t]=a&&r(c,t)?c[t]:u("Symbol."+t)),l[t]}},function(t,e,n){n=n(2);t.exports=!n(function(){return 7!=Object.defineProperty({},1,{get:function(){return 7}})[1]})},function(t,e,n){var i=n(4),o=n(37),r=i({}.hasOwnProperty);t.exports=Object.hasOwn||function(t,e){return r(o(t),e)}},function(t,e,n){"use strict";var i=n(22),o=n(59).includes,r=n(2),n=n(68);i({target:"Array",proto:!0,forced:r(function(){return!Array(1).includes()})},{includes:function(t){return o(this,t,1")})||!n||h)},function(t,e,n){var l=n(6),u=n(45).f,h=n(15),d=n(19),f=n(36),p=n(82),g=n(61);t.exports=function(t,e){var n,i,o,r=t.target,s=t.global,a=t.stat,c=s?l:a?l[r]||f(r,{}):(l[r]||{}).prototype;if(c)for(n in e){if(i=e[n],o=t.dontCallGetSet?(o=u(c,n))&&o.value:c[n],!g(s?n:r+(a?".":"#")+n,t.forced)&&void 0!==o){if(typeof i==typeof o)continue;p(i,o)}(t.sham||o&&o.sham)&&h(i,"sham",!0),d(c,n,i,t)}}},function(t,e,n){var i=n(6),o=n(3);t.exports=function(t,e){return arguments.length<2?(n=i[t],o(n)?n:void 0):i[t]&&i[t][e];var n}},function(t,e){t.exports=!1},function(t,e,n){var i,o,r,s,a=n(81),c=n(6),l=n(13),u=n(15),h=n(9),d=n(35),f=n(40),n=n(41),p="Object already 
initialized",g=c.TypeError,c=c.WeakMap,m=a||d.state?((r=d.state||(d.state=new c)).get=r.get,r.has=r.has,r.set=r.set,i=function(t,e){if(r.has(t))throw g(p);return e.facade=t,r.set(t,e),e},o=function(t){return r.get(t)||{}},function(t){return r.has(t)}):(n[s=f("state")]=!0,i=function(t,e){if(h(t,s))throw g(p);return e.facade=t,u(t,s,e),e},o=function(t){return h(t,s)?t[s]:{}},function(t){return h(t,s)});t.exports={set:i,get:o,has:m,enforce:function(t){return m(t)?o(t):i(t,{})},getterFor:function(e){return function(t){if(l(t)&&(t=o(t)).type===e)return t;throw g("Incompatible receiver, "+e+" required")}}}},function(t,e,n){var i=n(85);t.exports=function(t){t=+t;return t!=t||0==t?0:i(t)}},function(t,e,n){function i(){}function o(t){t.write(g("")),t.close();var e=t.parentWindow.Object;return t=null,e}var r,s=n(14),a=n(90),c=n(42),l=n(41),u=n(92),h=n(38),n=n(40),d="prototype",f="script",p=n("IE_PROTO"),g=function(t){return"<"+f+">"+t+""},m=function(){try{r=new ActiveXObject("htmlfile")}catch(t){}m="undefined"==typeof document||document.domain&&r?o(r):(t=h("iframe"),e="java"+f+":",t.style.display="none",u.appendChild(t),t.src=String(e),(e=t.contentWindow.document).open(),e.write(g("document.F=Object")),e.close(),e.F);for(var t,e,n=c.length;n--;)delete m[d][c[n]];return m()};l[p]=!0,t.exports=Object.create||function(t,e){var n;return null!==t?(i[d]=s(t),n=new i,i[d]=null,n[p]=t):n=m(),void 0===e?n:a.f(n,e)}},function(t,e,n){"use strict";var i=n(22),o=n(93).trim;i({target:"String",proto:!0,forced:n(94)("trim")},{trim:function(){return o(this)}})},function(M,H,t){var e=t(8),n=t(6),i=t(4),o=t(61),l=t(95),u=t(15),r=t(57).f,h=t(33),d=t(98),f=t(20),p=t(99),s=t(63),a=t(100),c=t(19),g=t(2),m=t(9),_=t(25).enforce,v=t(101),b=t(7),y=t(64),w=t(65),E=b("match"),x=n.RegExp,C=x.prototype,A=n.SyntaxError,T=i(C.exec),O=i("".charAt),S=i("".replace),L=i("".indexOf),R=i("".slice),B=/^\?<[^\s\d!#%&*+<=>@^][^\s!#%&*+<=>@^]*>/,I=/a/g,k=/a/g,t=new x(I)!==I,D=s.MISSED_STICKY,W=s.UNSUPPORTED_Y,b=e&&(!t||D||y||w||g(function(){return k[E]=!1,x(I)!=I||x(k)==k||"/a/i"!=x(I,"i")}));if(o("RegExp",b)){function j(t,e){var n,i,o=h(C,this),r=d(t),s=void 0===e,a=[],c=t;if(!o&&r&&s&&t.constructor===j)return t;if((r||h(C,t))&&(t=t.source,s)&&(e=p(c)),t=void 0===t?"":f(t),e=void 0===e?"":f(e),c=t,r=e=y&&"dotAll"in I&&(n=!!e&&-1"===e&&c:if(""===u||m(s,u))throw new A("Invalid capture group name");s[u]=!0,c=!(r[r.length]=[u,l]),u="";continue}c?u+=e:o+=e}return[o,r]}(t))[0],a=s[1]),s=l(x(t,e),o?this:C,j),(n||i||a.length)&&(e=_(s),n&&(e.dotAll=!0,e.raw=j(function(t){for(var e,n=t.length,i=0,o="",r=!1;i<=n;i++)"\\"===(e=O(t,i))?o+=e+O(t,++i):r||"."!==e?("["===e?r=!0:"]"===e&&(r=!1),o+=e):o+="[\\s\\S]";return o}(t),r)),i&&(e.sticky=!0),a.length)&&(e.groups=a),t!==c)try{u(s,"source",""===c?"(?:)":c)}catch(t){}return s}for(var P=r(x),N=0;P.length>N;)a(j,x,P[N++]);(C.constructor=j).prototype=C,c(n,"RegExp",j,{constructor:!0})}v("RegExp")},function(t,e,n){n=n(2);t.exports=!n(function(){var t=function(){}.bind();return"function"!=typeof t||t.hasOwnProperty("prototype")})},function(t,e){t.exports=function(t,e){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:e}}},function(t,e){t.exports=function(t){return null==t}},function(t,e,n){n=n(4);t.exports=n({}.isPrototypeOf)},function(t,e,n){var i=n(24),o=n(35);(t.exports=function(t,e){return o[t]||(o[t]=void 0!==e?e:{})})("versions",[]).push({version:"3.28.0",mode:i?"pure":"global",copyright:"© 2014-2023 Denis Pushkarev 
(zloirock.ru)",license:"https://github.com/zloirock/core-js/blob/v3.28.0/LICENSE",source:"https://github.com/zloirock/core-js"})},function(t,e,n){var i=n(6),n=n(36),o="__core-js_shared__",i=i[o]||n(o,{});t.exports=i},function(t,e,n){var i=n(6),o=Object.defineProperty;t.exports=function(e,n){try{o(i,e,{value:n,configurable:!0,writable:!0})}catch(t){i[e]=n}return n}},function(t,e,n){var i=n(18),o=Object;t.exports=function(t){return o(i(t))}},function(t,e,n){var i=n(6),n=n(13),o=i.document,r=n(o)&&n(o.createElement);t.exports=function(t){return r?o.createElement(t):{}}},function(t,e,n){var i=n(8),n=n(9),o=Function.prototype,r=i&&Object.getOwnPropertyDescriptor,n=n(o,"name"),s=n&&"something"===function(){}.name,i=n&&(!i||r(o,"name").configurable);t.exports={EXISTS:n,PROPER:s,CONFIGURABLE:i}},function(t,e,n){var i=n(34),o=n(53),r=i("keys");t.exports=function(t){return r[t]||(r[t]=o(t))}},function(t,e){t.exports={}},function(t,e){t.exports=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"]},function(t,e,n){"use strict";var p=n(12),i=n(4),g=n(20),m=n(62),o=n(63),r=n(34),_=n(27),v=n(25).get,s=n(64),n=n(65),b=r("native-string-replace",String.prototype.replace),y=RegExp.prototype.exec,w=y,E=i("".charAt),x=i("".indexOf),C=i("".replace),A=i("".slice),T=(r=/b*/g,p(y,i=/a/,"a"),p(y,r,"a"),0!==i.lastIndex||0!==r.lastIndex),O=o.BROKEN_CARET,S=void 0!==/()??/.exec("")[1];(T||S||O||s||n)&&(w=function(t){var e,n,i,o,r,s,a=this,c=v(a),t=g(t),l=c.raw;if(l)return l.lastIndex=a.lastIndex,h=p(w,l,t),a.lastIndex=l.lastIndex,h;var u=c.groups,l=O&&a.sticky,h=p(m,a),c=a.source,d=0,f=t;if(l&&(h=C(h,"y",""),-1===x(h,"g")&&(h+="g"),f=A(t,a.lastIndex),0o;)!s(i,n=e[o++])||~c(r,n)||u(r,n);return r}},function(t,e,n){function i(a){return function(t,e,n){var i,o=c(t),r=u(o),s=l(n,r);if(a&&e!=e){for(;sb)","g");return"b"!==t.exec("b").groups.a||"bc"!=="b".replace(t,"$c")})},function(t,e){t.exports="\t\n\v\f\r                \u2028\u2029\ufeff"},function(t,e,n){var o=n(96),r=n(14),s=n(97);t.exports=Object.setPrototypeOf||("__proto__"in{}?function(){var n,i=!1,t={};try{(n=o(Object.prototype,"__proto__","set"))(t,[]),i=t instanceof Array}catch(t){}return function(t,e){return r(t),s(e),i?n(t,e):t.__proto__=e,t}}():void 0)},function(t,e,n){var i=n(7),o=n(27),n=n(11).f,r=i("unscopables"),s=Array.prototype;null==s[r]&&n(s,r,{configurable:!0,value:o(null)}),t.exports=function(t){s[r][t]=!0}},function(t,e,n){"use strict";var i,o,r=n(2),s=n(3),a=n(13),c=n(27),l=n(70),u=n(19),h=n(7),n=n(24),d=h("iterator"),h=!1;[].keys&&("next"in(o=[].keys())?(l=l(l(o)))!==Object.prototype&&(i=l):h=!0),!a(i)||r(function(){var t={};return i[d].call(t)!==t})?i={}:n&&(i=c(i)),s(i[d])||u(i,d,function(){return this}),t.exports={IteratorPrototype:i,BUGGY_SAFARI_ITERATORS:h}},function(t,e,n){var i=n(9),o=n(3),r=n(37),s=n(40),n=n(112),a=s("IE_PROTO"),c=Object,l=c.prototype;t.exports=n?c.getPrototypeOf:function(t){var e,t=r(t);return i(t,a)?t[a]:(e=t.constructor,o(e)&&t instanceof e?e.prototype:t instanceof c?l:null)}},function(t,e,n){var i=n(11).f,o=n(9),r=n(7)("toStringTag");t.exports=function(t,e,n){(t=t&&!n?t.prototype:t)&&!o(t,r)&&i(t,r,{configurable:!0,value:e})}},function(t,e){var n=function(){return this}();try{n=n||new Function("return this")()}catch(t){"object"==typeof window&&(n=window)}t.exports=n},function(t,e,n){"use strict";var i={}.propertyIsEnumerable,o=Object.getOwnPropertyDescriptor,r=o&&!i.call({1:2},1);e.f=r?function(t){t=o(this,t);return!!t&&t.enumerable}:i},function(t,e,n){var 
i=n(4),o=n(2),r=n(17),s=Object,a=i("".split);t.exports=o(function(){return!s("z").propertyIsEnumerable(0)})?function(t){return"String"==r(t)?a(t,""):s(t)}:s},function(t,e,n){var i=n(12),o=n(13),r=n(48),s=n(51),a=n(79),n=n(7),c=TypeError,l=n("toPrimitive");t.exports=function(t,e){if(!o(t)||r(t))return t;var n=s(t,l);if(n){if(n=i(n,t,e=void 0===e?"default":e),!o(n)||r(n))return n;throw c("Can't convert object to primitive value")}return a(t,e=void 0===e?"number":e)}},function(t,e,n){var i,o,r=n(6),n=n(77),s=r.process,r=r.Deno,s=s&&s.versions||r&&r.version,r=s&&s.v8;!(o=r?0<(i=r.split("."))[0]&&i[0]<4?1:+(i[0]+i[1]):o)&&n&&(!(i=n.match(/Edge\/(\d+)/))||74<=i[1])&&(i=n.match(/Chrome\/(\d+)/))&&(o=+i[1]),t.exports=o},function(t,e){t.exports="undefined"!=typeof navigator&&String(navigator.userAgent)||""},function(t,e){var n=String;t.exports=function(t){try{return n(t)}catch(t){return"Object"}}},function(t,e,n){var o=n(12),r=n(3),s=n(13),a=TypeError;t.exports=function(t,e){var n,i;if("string"===e&&r(n=t.toString)&&!s(i=o(n,t)))return i;if(r(n=t.valueOf)&&!s(i=o(n,t)))return i;if("string"!==e&&r(n=t.toString)&&!s(i=o(n,t)))return i;throw a("Can't convert object to primitive value")}},function(t,e,n){var i=n(4),o=n(3),n=n(35),r=i(Function.toString);o(n.inspectSource)||(n.inspectSource=function(t){return r(t)}),t.exports=n.inspectSource},function(t,e,n){var i=n(6),n=n(3),i=i.WeakMap;t.exports=n(i)&&/native code/.test(String(i))},function(t,e,n){var c=n(9),l=n(83),u=n(45),h=n(11);t.exports=function(t,e,n){for(var i=l(e),o=h.f,r=u.f,s=0;s]*>)/g,_=/\$([$&'`]|\d{1,2})/g;t.exports=function(r,s,a,c,l,t){var u=a+r.length,h=c.length,e=_;return void 0!==l&&(l=o(l),e=m),p(t,e,function(t,e){var n;switch(f(e,0)){case"$":return"$";case"&":return r;case"`":return g(s,0,a);case"'":return g(s,u);case"<":n=l[g(e,1,-1)];break;default:var i,o=+e;if(0==o)return t;if(h{"use strict";n.d(e,{Z:()=>i});e=n(645),n=n.n(e)()(function(t){return t[1]});n.push([t.id,"INPUT:-webkit-autofill,SELECT:-webkit-autofill,TEXTAREA:-webkit-autofill{animation-name:onautofillstart}INPUT:not(:-webkit-autofill),SELECT:not(:-webkit-autofill),TEXTAREA:not(:-webkit-autofill){animation-name:onautofillcancel}@keyframes onautofillstart{}@keyframes onautofillcancel{}",""]);const i=n},645:t=>{"use strict";t.exports=function(n){var c=[];return c.toString=function(){return this.map(function(t){var e=n(t);return t[2]?"@media ".concat(t[2]," {").concat(e,"}"):e}).join("")},c.i=function(t,e,n){"string"==typeof t&&(t=[[null,t,""]]);var i={};if(n)for(var o=0;o{if("undefined"!=typeof window)try{var t=new window.CustomEvent("test",{cancelable:!0});if(t.preventDefault(),!0!==t.defaultPrevented)throw new Error("Could not prevent default")}catch(t){function e(t,e){var n,i;return(e=e||{}).bubbles=!!e.bubbles,e.cancelable=!!e.cancelable,(n=document.createEvent("CustomEvent")).initCustomEvent(t,e.bubbles,e.cancelable,e.detail),i=n.preventDefault,n.preventDefault=function(){i.call(this);try{Object.defineProperty(this,"defaultPrevented",{get:function(){return!0}})}catch(t){this.defaultPrevented=!0}},n}e.prototype=window.Event.prototype,window.CustomEvent=e}},379:(t,e,o)=>{"use strict";i={};var n,i,r=function(t){if(void 0===i[t]){var e=document.querySelector(t);if(window.HTMLIFrameElement&&e instanceof window.HTMLIFrameElement)try{e=e.contentDocument.head}catch(t){e=null}i[t]=e}return i[t]},l=[];function u(t){for(var e=-1,n=0;n{var e=t&&t.__esModule?()=>t.default:()=>t;return o.d(e,{a:e}),e},o.d=(t,e)=>{for(var n in 
e)o.o(e,n)&&!o.o(t,n)&&Object.defineProperty(t,n,{enumerable:!0,get:e[n]})},o.o=(t,e)=>Object.prototype.hasOwnProperty.call(t,e),(()=>{"use strict";var t=o(379),t=o.n(t),e=o(454);function n(t){var e;t.hasAttribute("autocompleted")||(t.setAttribute("autocompleted",""),e=new window.CustomEvent("onautocomplete",{bubbles:!0,cancelable:!0,detail:null}),t.dispatchEvent(e))||(t.value="")}function i(t){t.hasAttribute("autocompleted")&&(t.removeAttribute("autocompleted"),t.dispatchEvent(new window.CustomEvent("onautocomplete",{bubbles:!0,cancelable:!1,detail:null})))}t()(e.Z,{insert:"head",singleton:!1}),e.Z.locals,o(810),document.addEventListener("animationstart",function(t){("onautofillstart"===t.animationName?n:i)(t.target)},!0),document.addEventListener("input",function(t){("insertReplacementText"!==t.inputType&&"data"in t?i:n)(t.target)},!0)})()},,,function(M,t,e){"use strict";e.r(t),e.d(t,"Alert",function(){return Fe}),e.d(t,"Button",function(){return ee}),e.d(t,"Carousel",function(){return xn}),e.d(t,"Collapse",function(){return ea}),e.d(t,"Offcanvas",function(){return ke}),e.d(t,"Dropdown",function(){return Na}),e.d(t,"Input",function(){return Ds}),e.d(t,"Modal",function(){return $n}),e.d(t,"Popover",function(){return Jo}),e.d(t,"Ripple",function(){return Ya}),e.d(t,"ScrollSpy",function(){return gr}),e.d(t,"Tab",function(){return Mr}),e.d(t,"Toast",function(){return bs}),e.d(t,"Tooltip",function(){return Kr}),e.d(t,"Range",function(){return tc});var n={};e.r(n),e.d(n,"top",function(){return O}),e.d(n,"bottom",function(){return S}),e.d(n,"right",function(){return L}),e.d(n,"left",function(){return I}),e.d(n,"auto",function(){return Gn}),e.d(n,"basePlacements",function(){return Zn}),e.d(n,"start",function(){return Jn}),e.d(n,"end",function(){return ti}),e.d(n,"clippingParents",function(){return ei}),e.d(n,"viewport",function(){return ni}),e.d(n,"popper",function(){return ii}),e.d(n,"reference",function(){return oi}),e.d(n,"variationPlacements",function(){return ri}),e.d(n,"placements",function(){return si}),e.d(n,"beforeRead",function(){return ai}),e.d(n,"read",function(){return ci}),e.d(n,"afterRead",function(){return li}),e.d(n,"beforeMain",function(){return ui}),e.d(n,"main",function(){return hi}),e.d(n,"afterMain",function(){return di}),e.d(n,"beforeWrite",function(){return fi}),e.d(n,"write",function(){return pi}),e.d(n,"afterWrite",function(){return gi}),e.d(n,"modifierPhases",function(){return mi}),e.d(n,"applyStyles",function(){return bi}),e.d(n,"arrow",function(){return Hi}),e.d(n,"computeStyles",function(){return Fi}),e.d(n,"eventListeners",function(){return zi}),e.d(n,"flip",function(){return io}),e.d(n,"hide",function(){return so}),e.d(n,"offset",function(){return ao}),e.d(n,"popperOffsets",function(){return co}),e.d(n,"preventOverflow",function(){return lo}),e.d(n,"popperGenerator",function(){return go}),e.d(n,"detectOverflow",function(){return no}),e.d(n,"createPopperBase",function(){return mo}),e.d(n,"createPopper",function(){return _o}),e.d(n,"createPopperLite",function(){return vo}),e(5),e(28),e(29);const H=t=>{let e=t.getAttribute("data-mdb-target");return e&&"#"!==e||(t=t.getAttribute("href"),e=t&&"#"!==t?t.trim():null),e},R=t=>{t=H(t);return t?document.querySelector(t):null};const B=(o,r,s)=>{Object.keys(s).forEach(t=>{var e,n=s[t],i=r[t],i=i&&((e=i)[0]||e).nodeType?"element":null==(e=i)?"".concat(e):{}.toString.call(e).match(/\s([a-z]+)/i)[1].toLowerCase();if(!new RegExp(n).test(i))throw new Error("".concat(o.toUpperCase(),": ")+'Option "'.concat(t,'" provided type 
"').concat(i,'" ')+'but expected type "'.concat(n,'".'))})};const i=()=>{var t=window["jQuery"];return t&&!document.body.hasAttribute("data-mdb-no-jquery")?t:null},o=t=>{"loading"===document.readyState?document.addEventListener("DOMContentLoaded",t):t()};document.documentElement.dir;const W=t=>document.createElement(t);const F=(()=>{const i={};let o=1;return{set(t,e,n){void 0===t[e]&&(t[e]={key:e,id:o},o++),i[t[e].id]=n},get(t,e){return t&&void 0!==t[e]&&(t=t[e]).key===e?i[t.id]:null},delete(t,e){var n;void 0!==t[e]&&(n=t[e]).key===e&&(delete i[n.id],delete t[e])}}})();var r={setData(t,e,n){F.set(t,e,n)},getData(t,e){return F.get(t,e)},removeData(t,e){F.delete(t,e)}};e(21),e(0),e(1);const q=i(),z=/[^.]*(?=\..*)\.|.*/,Q=/\..*/,V=/::\d+$/,U={};let Y=1;const X={mouseenter:"mouseover",mouseleave:"mouseout"},K=["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"];function $(t,e){return e&&"".concat(e,"::").concat(Y++)||t.uidEvent||Y++}function G(t){var e=$(t);return t.uidEvent=e,U[e]=U[e]||{},U[e]}function Z(n,i,t){var o=2{{var e=s,n=c,i=t,o=a.slice(1);const r=n[i]||{};Object.keys(r).forEach(t=>{-1{var e=t.replace(V,"");(!r||-1{Object.defineProperty(u,t,{get(){return n[t]}})}),l&&u.preventDefault(),c&&t.dispatchEvent(u),u.defaultPrevented&&void 0!==s&&s.preventDefault(),u}};var s=nt;function it(t){return"true"===t||"false"!==t&&(t===Number(t).toString()?Number(t):""===t||"null"===t?null:t)}function ot(t){return t.replace(/[A-Z]/g,t=>"-".concat(t.toLowerCase()))}var c={setDataAttribute(t,e,n){t.setAttribute("data-mdb-".concat(ot(e)),n)},removeDataAttribute(t,e){t.removeAttribute("data-mdb-".concat(ot(e)))},getDataAttributes(t){if(!t)return{};const n={...t.dataset};return Object.keys(n).filter(t=>t.startsWith("mdb")).forEach(t=>{let e=t.replace(/^mdb/,"");e=e.charAt(0).toLowerCase()+e.slice(1,e.length),n[e]=it(n[t])}),n},getDataAttribute(t,e){return it(t.getAttribute("data-mdb-".concat(ot(e))))},offset(t){t=t.getBoundingClientRect();return{top:t.top+document.body.scrollTop,left:t.left+document.body.scrollLeft}},position(t){return{top:t.offsetTop,left:t.offsetLeft}},style(t,e){Object.assign(t.style,e)},toggleClass(t,e){t&&(t.classList.contains(e)?t.classList.remove(e):t.classList.add(e))},addClass(t,e){t.classList.contains(e)||t.classList.add(e)},addStyle(e,n){Object.keys(n).forEach(t=>{e.style[t]=n[t]})},removeClass(t,e){t.classList.contains(e)&&t.classList.remove(e)},hasClass(t,e){return t.classList.contains(e)}};var a={closest(t,e){return t.closest(e)},matches(t,e){return t.matches(e)},find(t){var e=1t.matches(e))},parents(t,e){var n=[];let i=t.parentNode;for(;i&&i.nodeType===Node.ELEMENT_NODE&&3!==i.nodeType;)this.matches(i,e)&&n.push(i),i=i.parentNode;return n},prev(t,e){let n=t.previousElementSibling;for(;n;){if(n.matches(e))return[n];n=n.previousElementSibling}return[]},next(t,e){let n=t.nextElementSibling;for(;n;){if(this.matches(n,e))return[n];n=n.nextElementSibling}return[]}};e(10);const rt=1e3,st="transitionend",at=e=>{let n=e.getAttribute("data-mdb-target");if(!n||"#"===n){let 
t=e.getAttribute("href");if(!t||!t.includes("#")&&!t.startsWith("."))return null;t.includes("#")&&!t.startsWith("#")&&(t="#".concat(t.split("#")[1])),n=t&&"#"!==t?t.trim():null}return n},ct=t=>{t=at(t);return t&&document.querySelector(t)?t:null},l=t=>{t=at(t);return t?document.querySelector(t):null},lt=t=>{t.dispatchEvent(new Event(st))},u=t=>!(!t||"object"!=typeof t)&&void 0!==(t=void 0!==t.jquery?t[0]:t).nodeType,ut=t=>u(t)?t.jquery?t[0]:t:"string"==typeof t&&0{if(!u(t)||0===t.getClientRects().length)return!1;var e="visible"===getComputedStyle(t).getPropertyValue("visibility"),n=t.closest("details:not([open])");if(n&&n!==t){t=t.closest("summary");if(t&&t.parentNode!==n)return!1;if(null===t)return!1}return e},dt=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),ft=t=>{var e;return document.documentElement.attachShadow?"function"==typeof t.getRootNode?(e=t.getRootNode())instanceof ShadowRoot?e:null:t instanceof ShadowRoot?t:t.parentNode?ft(t.parentNode):null:null},pt=()=>{},gt=t=>{t.offsetHeight},mt=()=>window.jQuery&&!document.body.hasAttribute("data-mdb-no-jquery")?window.jQuery:null,_t=[],h=()=>"rtl"===document.documentElement.dir;t=i=>{var t;t=()=>{const t=mt();if(t){const e=i.NAME,n=t.fn[e];t.fn[e]=i.jQueryInterface,t.fn[e].Constructor=i,t.fn[e].noConflict=()=>(t.fn[e]=n,i.jQueryInterface)}},"loading"===document.readyState?(_t.length||document.addEventListener("DOMContentLoaded",()=>{for(const t of _t)t()}),_t.push(t)):t()};function vt(n,i){if(!(2{if(!t)return 0;let{transitionDuration:e,transitionDelay:n}=window.getComputedStyle(t);var t=Number.parseFloat(e),i=Number.parseFloat(n);return t||i?(e=e.split(",")[0],n=n.split(",")[0],(Number.parseFloat(e)+Number.parseFloat(n))*rt):0})(i)+5;let e=!1;const o=t=>{t=t.target;t===i&&(e=!0,i.removeEventListener(st,o),d(n))};i.addEventListener(st,o),setTimeout(()=>{e||lt(i)},t)}else d(n)}const d=t=>{"function"==typeof t&&t()},bt=(t,e,n,i)=>{var o=t.length;let r=t.indexOf(e);return-1===r?!n&&i?t[o-1]:t[0]:(r+=n?1:-1,i&&(r=(r+o)%o),t[Math.max(0,Math.min(r,o-1))])},yt=/[^.]*(?=\..*)\.|.*/,wt=/\..*/,Et=/::\d+$/,xt={};let Ct=1;const At={mouseenter:"mouseover",mouseleave:"mouseout"},Tt=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function Ot(t,e){return e&&"".concat(e,"::").concat(Ct++)||t.uidEvent||Ct++}function St(t){var e=Ot(t);return t.uidEvent=e,xt[e]=xt[e]||{},xt[e]}function Lt(t,e,n){let i=2t.callable===e&&t.delegationSelector===i)}function It(t,e,n){var i="string"==typeof e,e=!i&&e||n;let o=jt(t);return[i,e,o=Tt.has(o)?o:t]}function kt(i,o,r,s,a){if("string"==typeof o&&i){let[t,e,n]=It(o,r,s);o in At&&(e=(c=e,function(t){if(!t.relatedTarget||t.relatedTarget!==t.delegateTarget&&!t.delegateTarget.contains(t.relatedTarget))return c.call(this,t)}));var c,l,u,h,d,f,s=St(i),s=s[n]||(s[n]={}),p=Lt(s,e,t?r:null);p?p.oneOff=p.oneOff&&a:(p=Ot(e,o.replace(yt,"")),(o=t?(h=i,d=r,f=e,function e(n){var i=h.querySelectorAll(d);for(let 
t=n["target"];t&&t!==this;t=t.parentNode)for(const o of i)if(o===t)return Nt(n,{delegateTarget:t}),e.oneOff&&Pt.off(h,n.type,d,f),f.apply(t,[n])}):(l=i,u=e,function t(e){return Nt(e,{delegateTarget:l}),t.oneOff&&Pt.off(l,e.type,u),u.apply(l,[e])})).delegationSelector=t?r:null,o.callable=e,o.oneOff=a,s[o.uidEvent=p]=o,i.addEventListener(n,o,t))}}function Dt(t,e,n,i,o){i=Lt(e[n],i,o);i&&(t.removeEventListener(n,i,Boolean(o)),delete e[n][i.uidEvent])}function jt(t){return t=t.replace(wt,""),At[t]||t}const Pt={on(t,e,n,i){kt(t,e,n,i,!1)},one(t,e,n,i){kt(t,e,n,i,!0)},off(t,e,n,i){if("string"==typeof e&&t){var[i,o,r]=It(e,n,i),s=r!==e,a=St(t),c=a[r]||{},l=e.startsWith(".");if(void 0!==o)return Object.keys(c).length?void Dt(t,a,r,o,i?n:null):void 0;if(l)for(const _ of Object.keys(a)){u=g=p=f=d=h=void 0;var u,h=t,d=a,f=_,p=e.slice(1),g=d[f]||{};for(const v of Object.keys(g))v.includes(p)&&Dt(h,d,f,(u=g[v]).callable,u.delegationSelector)}for(const b of Object.keys(c)){var m=b.replace(Et,"");s&&!e.includes(m)||Dt(t,a,r,(m=c[b]).callable,m.delegationSelector)}}},trigger(t,e,n){if("string"!=typeof e||!t)return null;var i=mt();let o=null,r=!0,s=!0,a=!1;e!==jt(e)&&i&&(o=i.Event(e,n),i(t).trigger(o),r=!o.isPropagationStopped(),s=!o.isImmediatePropagationStopped(),a=o.isDefaultPrevented());i=Nt(i=new Event(e,{bubbles:r,cancelable:!0}),n);return a&&i.preventDefault(),s&&t.dispatchEvent(i),i.defaultPrevented&&o&&o.preventDefault(),i}};function Nt(e,t){for(const[n,i]of Object.entries(t||{}))try{e[n]=i}catch(t){Object.defineProperty(e,n,{configurable:!0,get(){return i}})}return e}var f=Pt;const Mt=new Map;var Ht=function(t,e,n){Mt.has(t)||Mt.set(t,new Map);t=Mt.get(t);t.has(e)||0===t.size?t.set(e,n):console.error("Bootstrap doesn't allow more than one instance per element. 
Bound instance: ".concat(Array.from(t.keys())[0],"."))},Rt=function(t,e){return Mt.has(t)&&Mt.get(t).get(e)||null},Bt=function(t,e){var n;Mt.has(t)&&((n=Mt.get(t)).delete(e),0===n.size)&&Mt.delete(t)};function Wt(e){if("true"===e)return!0;if("false"===e)return!1;if(e===Number(e).toString())return Number(e);if(""===e||"null"===e)return null;if("string"!=typeof e)return e;try{return JSON.parse(decodeURIComponent(e))}catch(t){return e}}function Ft(t){return t.replace(/[A-Z]/g,t=>"-".concat(t.toLowerCase()))}var p={setDataAttribute(t,e,n){t.setAttribute("data-mdb-".concat(Ft(e)),n)},removeDataAttribute(t,e){t.removeAttribute("data-mdb-".concat(Ft(e)))},getDataAttributes(e){if(!e)return{};var n={};for(const i of Object.keys(e.dataset).filter(t=>t.startsWith("mdb")&&!t.startsWith("mdbConfig"))){let t=i.replace(/^mdb/,"");n[t=t.charAt(0).toLowerCase()+t.slice(1,t.length)]=Wt(e.dataset[i])}return n},getDataAttribute(t,e){return Wt(t.getAttribute("data-mdb-".concat(Ft(e))))}};var g=class{static get Default(){return{}}static get DefaultType(){return{}}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}_getConfig(t){return t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t}_mergeConfigObj(t,e){var n=u(e)?p.getDataAttribute(e,"config"):{};return{...this.constructor.Default,..."object"==typeof n?n:{},...u(e)?p.getDataAttributes(e):{},..."object"==typeof t?t:{}}}_typeCheckConfig(t){var e=1{t.preventDefault();t=t.target.closest(qt);zt.getOrCreateInstance(t).toggle()}),t(zt);_=zt;const Qt="button",Vt="mdb.".concat(Qt);var v=".".concat(Vt);const Ut="click".concat(v),Yt="transitionend",Xt="mouseenter",Kt="mouseleave",$t="hide".concat(v),Gt="hidden".concat(v),Zt="show".concat(v),Jt="shown".concat(v),te="fixed-action-btn";class b extends _{constructor(t){super(t),this._fn={},this._element&&(r.setData(this._element,Vt,this),this._init())}static get NAME(){return Qt}static jQueryInterface(n,i){return this.each(function(){let t=r.getData(this,Vt);var e="object"==typeof n&&n;if((t||!/dispose/.test(n))&&(t=t||new b(this,e),"string"==typeof n)){if(void 0===t[n])throw new TypeError('No method named "'.concat(n,'"'));t[n](i)}})}get _actionButton(){return a.findOne(".fixed-action-btn:not(.smooth-scroll) > .btn-floating",this._element)}get _buttonListElements(){return a.find("ul .btn",this._element)}get _buttonList(){return a.findOne("ul",this._element)}get _isTouchDevice(){return"ontouchstart"in 
document.documentElement}show(){c.hasClass(this._element,te)&&(s.off(this._buttonList,Yt),s.trigger(this._element,Zt),this._bindListOpenTransitionEnd(),c.addStyle(this._element,{height:"".concat(this._fullContainerHeight,"px")}),this._toggleVisibility(!0))}hide(){c.hasClass(this._element,te)&&(s.off(this._buttonList,Yt),s.trigger(this._element,$t),this._bindListHideTransitionEnd(),this._toggleVisibility(!1))}dispose(){c.hasClass(this._element,te)&&(s.off(this._actionButton,Ut),this._actionButton.removeEventListener(Xt,this._fn.mouseenter),this._element.removeEventListener(Kt,this._fn.mouseleave)),super.dispose()}_init(){c.hasClass(this._element,te)&&(this._saveInitialHeights(),this._setInitialStyles(),this._bindInitialEvents())}_bindMouseEnter(){this._actionButton.addEventListener(Xt,this._fn.mouseenter=()=>{this._isTouchDevice||this.show()})}_bindMouseLeave(){this._element.addEventListener(Kt,this._fn.mouseleave=()=>{this.hide()})}_bindClick(){s.on(this._actionButton,Ut,()=>{c.hasClass(this._element,"active")?this.hide():this.show()})}_bindListHideTransitionEnd(){s.on(this._buttonList,Yt,t=>{"transform"===t.propertyName&&(s.off(this._buttonList,Yt),this._element.style.height="".concat(this._initialContainerHeight,"px"),s.trigger(this._element,Gt))})}_bindListOpenTransitionEnd(){s.on(this._buttonList,Yt,t=>{"transform"===t.propertyName&&(s.off(this._buttonList,Yt),s.trigger(this._element,Jt))})}_toggleVisibility(t){const e=t?"addClass":"removeClass";t=t?"translate(0)":"translateY(".concat(this._fullContainerHeight,"px)");c.addStyle(this._buttonList,{transform:t}),this._buttonListElements&&this._buttonListElements.forEach(t=>c[e](t,"shown")),c[e](this._element,"active")}_getHeight(t){t=window.getComputedStyle(t);return parseFloat(t.getPropertyValue("height"))}_saveInitialHeights(){this._initialContainerHeight=this._getHeight(this._element),this._initialListHeight=this._getHeight(this._buttonList),this._fullContainerHeight=this._initialContainerHeight+this._initialListHeight}_bindInitialEvents(){this._bindClick(),this._bindMouseEnter(),this._bindMouseLeave()}_setInitialStyles(){this._buttonList.style.marginBottom="".concat(this._initialContainerHeight,"px"),this._buttonList.style.transform="translateY(".concat(this._fullContainerHeight,"px)"),this._element.style.height="".concat(this._initialContainerHeight,"px")}}a.find(".fixed-action-btn").forEach(t=>{let e=b.getInstance(t);return e=e||new b(t)}),a.find('[data-mdb-toggle="button"]').forEach(t=>{let e=b.getInstance(t);return e=e||new b(t)}),o(()=>{const t=i();if(t){const e=t.fn[Qt];t.fn[Qt]=b.jQueryInterface,t.fn[Qt].Constructor=b,t.fn[Qt].noConflict=()=>(t.fn[Qt]=e,b.jQueryInterface)}});var ee=b,y={find(t){var e=1t.matches(e))},parents(t,e){var n=[];let i=t.parentNode.closest(e);for(;i;)n.push(i),i=i.parentNode.closest(e);return n},prev(t,e){let n=t.previousElementSibling;for(;n;){if(n.matches(e))return[n];n=n.previousElementSibling}return[]},next(t,e){let n=t.nextElementSibling;for(;n;){if(n.matches(e))return[n];n=n.nextElementSibling}return[]},focusableChildren(t){var e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map(t=>"".concat(t,':not([tabindex^="-"])')).join(",");return this.find(e,t).filter(t=>!dt(t)&&ht(t))}};const ne=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",ie=".sticky-top",oe="padding-right",re="margin-right";var se=class{constructor(){this._element=document.body}getWidth(){var t=document.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}hide(){const 
e=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,oe,t=>t+e),this._setElementAttributes(ne,oe,t=>t+e),this._setElementAttributes(ie,re,t=>t-e)}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,oe),this._resetElementAttributes(ne,oe),this._resetElementAttributes(ie,re)}isOverflowing(){return 0{var e;t!==this._element&&window.innerWidth>t.clientWidth+o||(this._saveInitialAttribute(t,n),e=window.getComputedStyle(t).getPropertyValue(n),t.style.setProperty(n,"".concat(i(Number.parseFloat(e)),"px")))})}_saveInitialAttribute(t,e){var n=t.style.getPropertyValue(e);n&&p.setDataAttribute(t,e,n)}_resetElementAttributes(t,n){this._applyManipulationCallback(t,t=>{var e=p.getDataAttribute(t,n);null===e?t.style.removeProperty(n):(p.removeDataAttribute(t,n),t.style.setProperty(n,e))})}_applyManipulationCallback(t,e){if(u(t))e(t);else for(const n of y.find(t,this._element))e(n)}};const ae="backdrop",ce="mousedown.bs.".concat(ae),le={className:"modal-backdrop",clickCallback:null,isAnimated:!1,isVisible:!0,rootElement:"body"},ue={className:"string",clickCallback:"(function|null)",isAnimated:"boolean",isVisible:"boolean",rootElement:"(element|string)"};var he=class extends g{constructor(t){super(),this._config=this._getConfig(t),this._isAppended=!1,this._element=null}static get Default(){return le}static get DefaultType(){return ue}static get NAME(){return ae}show(t){var e;this._config.isVisible?(this._append(),e=this._getElement(),this._config.isAnimated&>(e),e.classList.add("show"),this._emulateAnimation(()=>{d(t)})):d(t)}hide(t){this._config.isVisible?(this._getElement().classList.remove("show"),this._emulateAnimation(()=>{this.dispose(),d(t)})):d(t)}dispose(){this._isAppended&&(f.off(this._element,ce),this._element.remove(),this._isAppended=!1)}_getElement(){var t;return this._element||((t=document.createElement("div")).className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t),this._element}_configAfterMerge(t){return t.rootElement=ut(t.rootElement),t}_append(){var t;this._isAppended||(t=this._getElement(),this._config.rootElement.append(t),f.on(t,ce,()=>{d(this._config.clickCallback)}),this._isAppended=!0)}_emulateAnimation(t){vt(t,this._getElement(),this._config.isAnimated)}};const de=".".concat("bs.focustrap"),fe="focusin".concat(de),pe="keydown.tab".concat(de),ge="backward",me={autofocus:!0,trapElement:null},_e={autofocus:"boolean",trapElement:"element"};function ve(e){let n=1this._handleFocusin(t)),f.on(document,pe,t=>this._handleKeydown(t)),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,f.off(document,de))}_handleFocusin(t){var e=this._config["trapElement"];t.target===document||t.target===e||e.contains(t.target)||(0===(t=y.focusableChildren(e)).length?e:this._lastTabNavDirection===ge?t[t.length-1]:t[0]).focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?ge:"forward")}};var v=".".concat("bs.offcanvas"),_=".data-api",w="load".concat(v).concat(_);const ye="showing",we=".offcanvas.show",Ee="show".concat(v),xe="shown".concat(v),Ce="hide".concat(v),Ae="hidePrevented".concat(v),Te="hidden".concat(v);var E="resize".concat(v),_="click".concat(v).concat(_);const Oe="keydown.dismiss".concat(v);const Se={backdrop:!0,keyboard:!0,scroll:!1},Le={backdrop:"(boolean|string)",keyboard:"boolean",scroll:"boolean"};class Ie extends 
m{constructor(t,e){super(t,e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get Default(){return Se}static get DefaultType(){return Le}static get NAME(){return"offcanvas"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||f.trigger(this._element,Ee,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._backdrop.show(),this._config.scroll||(new se).hide(),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add(ye),this._queueCallback(()=>{this._config.scroll&&!this._config.backdrop||this._focustrap.activate(),this._element.classList.add("show"),this._element.classList.remove(ye),f.trigger(this._element,xe,{relatedTarget:t})},this._element,!0))}hide(){this._isShown&&!f.trigger(this._element,Ce).defaultPrevented&&(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.add("hiding"),this._backdrop.hide(),this._queueCallback(()=>{this._element.classList.remove("show","hiding"),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._config.scroll||(new se).reset(),f.trigger(this._element,Te)},this._element,!0))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_initializeBackDrop(){var t=Boolean(this._config.backdrop);return new he({className:"offcanvas-backdrop",isVisible:t,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:t?()=>{"static"===this._config.backdrop?f.trigger(this._element,Ae):this.hide()}:null})}_initializeFocusTrap(){return new be({trapElement:this._element})}_addEventListeners(){f.on(this._element,Oe,t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():f.trigger(this._element,Ae))})}static jQueryInterface(e){return this.each(function(){var t=Ie.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError('No method named "'.concat(e,'"'));t[e](this)}})}}f.on(document,_,'[data-mdb-toggle="offcanvas"]',function(t){var e=l(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),dt(this)||(f.one(e,Te,()=>{ht(this)&&this.focus()}),(t=y.findOne(we))&&t!==e&&Ie.getInstance(t).hide(),Ie.getOrCreateInstance(e).toggle(this))}),f.on(window,w,()=>{for(const t of y.find(we))Ie.getOrCreateInstance(t).show()}),f.on(window,E,()=>{for(const t of y.find("[aria-modal][class*=show][class*=offcanvas-]"))"fixed"!==getComputedStyle(t).position&&Ie.getOrCreateInstance(t).hide()}),ve(Ie),t(Ie);var ke=Ie;v=".".concat("bs.alert");const De="close".concat(v),je="closed".concat(v);class Pe extends m{static get NAME(){return"alert"}close(){var t;f.trigger(this._element,De).defaultPrevented||(this._element.classList.remove("show"),t=this._element.classList.contains("fade"),this._queueCallback(()=>this._destroyElement(),this._element,t))}_destroyElement(){this._element.remove(),f.trigger(this._element,je),this.dispose()}static jQueryInterface(e){return this.each(function(){var t=Pe.getOrCreateInstance(this);if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError('No method named "'.concat(e,'"'));t[e](this)}})}}ve(Pe,"close"),t(Pe);_=Pe;const Ne="alert";w="mdb.".concat(Ne),E=".".concat(w);const Me="close.bs.alert",He="closed.bs.alert",Re="close".concat(E),Be="closed".concat(E);class We extends 
_{constructor(t){super(t,1{s.trigger(this._element,Re)})}_bindClosedEvent(){s.on(this._element,He,()=>{s.trigger(this._element,Be)})}}a.find(".alert").forEach(t=>{var e=We.getInstance(t);e||new We(t)}),o(()=>{const t=i();if(t){const e=t.fn[Ne];t.fn[Ne]=We.jQueryInterface,t.fn[Ne].Constructor=We,t.fn[Ne].noConflict=()=>(t.fn[Ne]=e,We.jQueryInterface)}});var Fe=We;const qe=".bs.swipe",ze="touchstart".concat(qe),Qe="touchmove".concat(qe),Ve="touchend".concat(qe),Ue="pointerdown".concat(qe),Ye="pointerup".concat(qe),Xe={endCallback:null,leftCallback:null,rightCallback:null},Ke={endCallback:"(function|null)",leftCallback:"(function|null)",rightCallback:"(function|null)"};class $e extends g{constructor(t,e){super(),(this._element=t)&&$e.isSupported()&&(this._config=this._getConfig(e),this._deltaX=0,this._supportPointerEvents=Boolean(window.PointerEvent),this._initEvents())}static get Default(){return Xe}static get DefaultType(){return Ke}static get NAME(){return"swipe"}dispose(){f.off(this._element,qe)}_start(t){this._supportPointerEvents?this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX):this._deltaX=t.touches[0].clientX}_end(t){this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX-this._deltaX),this._handleSwipe(),d(this._config.endCallback)}_move(t){this._deltaX=t.touches&&1this._start(t)),f.on(this._element,Ye,t=>this._end(t)),this._element.classList.add("pointer-event")):(f.on(this._element,ze,t=>this._start(t)),f.on(this._element,Qe,t=>this._move(t)),f.on(this._element,Ve,t=>this._end(t)))}_eventIsPointerPenTouch(t){return this._supportPointerEvents&&("pen"===t.pointerType||"touch"===t.pointerType)}static isSupported(){return"ontouchstart"in document.documentElement||0this.nextWhenVisible(),this._config.interval)}_maybeEnableCycle(){this._config.ride&&(this._isSliding?f.one(this._element,on,()=>this.cycle()):this.cycle())}to(t){var e,n=this._getItems();t>n.length-1||t<0||(this._isSliding?f.one(this._element,on,()=>this.to(t)):(e=this._getItemIndex(this._getActive()))!==t&&(e=ethis._keydown(t)),"hover"===this._config.pause&&(f.on(this._element,sn,()=>this.pause()),f.on(this._element,an,()=>this._maybeEnableCycle())),this._config.touch&&Ge.isSupported()&&this._addTouchEventListeners()}_addTouchEventListeners(){for(const t of y.find(".carousel-item img",this._element))f.on(t,cn,t=>t.preventDefault());this._swipeHelper=new Ge(this._element,{leftCallback:()=>this._slide(this._directionToOrder(tn)),rightCallback:()=>this._slide(this._directionToOrder(en)),endCallback:()=>{"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout(()=>this._maybeEnableCycle(),500+this._config.interval))}})}_keydown(t){var e;/input|textarea/i.test(t.target.tagName)||(e=fn[t.key])&&(t.preventDefault(),this._slide(this._directionToOrder(e)))}_getItemIndex(t){return this._getItems().indexOf(t)}_setActiveIndicatorElement(t){var e;this._indicatorsElement&&((e=y.findOne(hn,this._indicatorsElement)).classList.remove(un),e.removeAttribute("aria-current"),e=y.findOne('[data-mdb-slide-to="'.concat(t,'"]'),this._indicatorsElement))&&(e.classList.add(un),e.setAttribute("aria-current","true"))}_updateInterval(){var t=this._activeElement||this._getActive();t&&(t=Number.parseInt(t.getAttribute("data-mdb-interval"),10),this._config.interval=t||this._config.defaultInterval)}_slide(e){var 
t=1f.trigger(this._element,t,{relatedTarget:o,direction:this._orderToDirection(e),from:this._getItemIndex(i),to:r});t=s(nn);if(!t.defaultPrevented&&i&&o){t=Boolean(this._interval);this.pause(),this._isSliding=!0,this._setActiveIndicatorElement(r),this._activeElement=o;const a=n?"carousel-item-start":"carousel-item-end",c=n?"carousel-item-next":"carousel-item-prev";o.classList.add(c),gt(o),i.classList.add(a),o.classList.add(a);this._queueCallback(()=>{o.classList.remove(a,c),o.classList.add(un),i.classList.remove(un,c,a),this._isSliding=!1,s(on)},i,this._isAnimated()),t&&this.cycle()}}}}_isAnimated(){return this._element.classList.contains("slide")}_getActive(){return y.findOne(".active.carousel-item",this._element)}_getItems(){return y.find(dn,this._element)}_clearInterval(){this._interval&&(clearInterval(this._interval),this._interval=null)}_directionToOrder(t){return h()?t===tn?Je:Ze:t===tn?Ze:Je}_orderToDirection(t){return h()?t===Je?tn:en:t===Je?en:tn}static jQueryInterface(e){return this.each(function(){var t=mn.getOrCreateInstance(this,e);if("number"==typeof e)t.to(e);else if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError('No method named "'.concat(e,'"'));t[e]()}})}}f.on(document,_,"[data-mdb-slide], [data-mdb-slide-to]",function(t){var e=l(this);e&&e.classList.contains(ln)&&(t.preventDefault(),t=mn.getOrCreateInstance(e),(e=this.getAttribute("data-mdb-slide-to"))?t.to(e):"next"===p.getDataAttribute(this,"slide")?t.next():t.prev(),t._maybeEnableCycle())}),f.on(window,E,()=>{for(const t of y.find('[data-mdb-ride="carousel"]'))mn.getOrCreateInstance(t)}),t(mn);v=mn;const _n="carousel";w="mdb.".concat(_n),_=".".concat(w);const vn="slide.bs.carousel",bn="slid.bs.carousel",yn="slide".concat(_),wn="slid".concat(_);class En extends v{constructor(t,e){super(t,e),this._init()}dispose(){s.off(this._element,vn),s.off(this._element,bn),super.dispose()}static get NAME(){return _n}_init(){this._bindSlideEvent(),this._bindSlidEvent()}_bindSlideEvent(){s.on(this._element,vn,t=>{s.trigger(this._element,yn,{relatedTarget:t.relatedTarget,direction:t.direction,from:t.from,to:t.to})})}_bindSlidEvent(){s.on(this._element,bn,t=>{s.trigger(this._element,wn,{relatedTarget:t.relatedTarget,direction:t.direction,from:t.from,to:t.to})})}}a.find('[data-mdb-ride="carousel"]').forEach(t=>{var e=En.getInstance(t);e||new En(t,c.getDataAttributes(t))}),o(()=>{const t=i();if(t){const e=t.fn[_n];t.fn[_n]=En.jQueryInterface,t.fn[_n].Constructor=En,t.fn[_n].noConflict=()=>(t.fn[_n]=e,En.jQueryInterface)}});var xn=En;const x=".".concat("bs.modal");const Cn="hide".concat(x),An="hidePrevented".concat(x),Tn="hidden".concat(x),On="show".concat(x),Sn="shown".concat(x),Ln="resize".concat(x),In="click.dismiss".concat(x),kn="mousedown.dismiss".concat(x),Dn="keydown.dismiss".concat(x);E="click".concat(x).concat(".data-api");const jn="modal-open",Pn="modal-static";const Nn={backdrop:!0,focus:!0,keyboard:!0},Mn={backdrop:"(boolean|string)",focus:"boolean",keyboard:"boolean"};class Hn extends m{constructor(t,e){super(t,e),this._dialog=y.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._isTransitioning=!1,this._scrollBar=new se,this._addEventListeners()}static get Default(){return Nn}static get DefaultType(){return Mn}static get NAME(){return"modal"}toggle(t){return 
this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||f.trigger(this._element,On,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isTransitioning=!0,this._scrollBar.hide(),document.body.classList.add(jn),this._adjustDialog(),this._backdrop.show(()=>this._showElement(t)))}hide(){!this._isShown||this._isTransitioning||f.trigger(this._element,Cn).defaultPrevented||(this._isShown=!1,this._isTransitioning=!0,this._focustrap.deactivate(),this._element.classList.remove("show"),this._queueCallback(()=>this._hideModal(),this._element,this._isAnimated()))}dispose(){for(const t of[window,this._dialog])f.off(t,x);this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new he({isVisible:Boolean(this._config.backdrop)&&Boolean(!this._config.modalNonInvasive),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new be({trapElement:this._element})}_showElement(t){document.body.contains(this._element)||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0;var e=y.findOne(".modal-body",this._dialog);e&&(e.scrollTop=0),gt(this._element),this._element.classList.add("show");this._queueCallback(()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,f.trigger(this._element,Sn,{relatedTarget:t})},this._dialog,this._isAnimated())}_addEventListeners(){f.on(this._element,Dn,t=>{"Escape"===t.key&&(this._config.keyboard?(t.preventDefault(),this.hide()):this._triggerBackdropTransition())}),f.on(window,Ln,()=>{this._isShown&&!this._isTransitioning&&this._adjustDialog()}),f.on(this._element,kn,e=>{f.one(this._element,In,t=>{this._element===e.target&&this._element===t.target&&("static"===this._config.backdrop?this._triggerBackdropTransition():this._config.backdrop&&this.hide())})})}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide(()=>{document.body.classList.remove(jn),this._resetAdjustments(),this._scrollBar.reset(),f.trigger(this._element,Tn)})}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){var t=f.trigger(this._element,An);if(!t.defaultPrevented){t=this._element.scrollHeight>document.documentElement.clientHeight;const e=this._element.style.overflowY;"hidden"===e||this._element.classList.contains(Pn)||(t||(this._element.style.overflowY="hidden"),this._element.classList.add(Pn),this._queueCallback(()=>{this._element.classList.remove(Pn),this._queueCallback(()=>{this._element.style.overflowY=e},this._dialog)},this._dialog),this._element.focus())}}_adjustDialog(){var t,e=this._element.scrollHeight>document.documentElement.clientHeight,n=this._scrollBar.getWidth(),i=0{t.defaultPrevented||f.one(e,Tn,()=>{ht(this)&&this.focus()})}),y.find(".modal.show").forEach(t=>{t.classList.contains("modal-non-invasive-show")||Hn.getInstance(t).hide()}),Hn.getOrCreateInstance(e).toggle(this)}),ve(Hn),t(Hn);w=Hn;const Rn="modal";_="mdb.".concat(Rn),v=".".concat(_);const Bn="hide.bs.modal",Wn="hidePrevented.bs.modal",Fn="hidden.bs.modal",qn="show.bs.modal",zn="shown.bs.modal",Qn="hide".concat(v),Vn="hidePrevented".concat(v),Un="hidden".concat(v),Yn="show".concat(v),Xn="shown".concat(v);class Kn extends 
w{constructor(t,e){super(t,e),this._init()}dispose(){s.off(this._element,qn),s.off(this._element,zn),s.off(this._element,Bn),s.off(this._element,Fn),s.off(this._element,Wn),super.dispose()}static get NAME(){return Rn}_init(){this._bindShowEvent(),this._bindShownEvent(),this._bindHideEvent(),this._bindHiddenEvent(),this._bindHidePreventedEvent()}_bindShowEvent(){s.on(this._element,qn,t=>{s.trigger(this._element,Yn,{relatedTarget:t.relatedTarget})})}_bindShownEvent(){s.on(this._element,zn,t=>{s.trigger(this._element,Xn,{relatedTarget:t.relatedTarget})})}_bindHideEvent(){s.on(this._element,Bn,()=>{s.trigger(this._element,Qn)})}_bindHiddenEvent(){s.on(this._element,Fn,()=>{s.trigger(this._element,Un)})}_bindHidePreventedEvent(){s.on(this._element,Wn,()=>{s.trigger(this._element,Vn)})}}a.find('[data-mdb-toggle="modal"]').forEach(t=>{var t=(t=>{t=H(t);return t&&document.querySelector(t)?t:null})(t),t=a.findOne(t),e=Kn.getInstance(t);e||new Kn(t)}),o(()=>{const t=i();if(t){const e=t.fn[Rn];t.fn[Rn]=Kn.jQueryInterface,t.fn[Rn].Constructor=Kn,t.fn[Rn].noConflict=()=>(t.fn[Rn]=e,Kn.jQueryInterface)}});var $n=Kn,O="top",S="bottom",L="right",I="left",Gn="auto",Zn=[O,S,L,I],Jn="start",ti="end",ei="clippingParents",ni="viewport",ii="popper",oi="reference",ri=Zn.reduce(function(t,e){return t.concat([e+"-"+Jn,e+"-"+ti])},[]),si=[].concat(Zn,[Gn]).reduce(function(t,e){return t.concat([e,e+"-"+Jn,e+"-"+ti])},[]),ai="beforeRead",ci="read",li="afterRead",ui="beforeMain",hi="main",di="afterMain",fi="beforeWrite",pi="write",gi="afterWrite",mi=[ai,ci,li,ui,hi,di,fi,pi,gi];function C(t){return t?(t.nodeName||"").toLowerCase():null}function A(t){var e;return null==t?window:"[object Window]"!==t.toString()?(e=t.ownerDocument)&&e.defaultView||window:t}function _i(t){return t instanceof A(t).Element||t instanceof Element}function T(t){return t instanceof A(t).HTMLElement||t instanceof HTMLElement}function vi(t){return"undefined"!=typeof ShadowRoot&&(t instanceof A(t).ShadowRoot||t instanceof ShadowRoot)}var bi={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var o=t.state;Object.keys(o.elements).forEach(function(t){var e=o.styles[t]||{},n=o.attributes[t]||{},i=o.elements[t];T(i)&&C(i)&&(Object.assign(i.style,e),Object.keys(n).forEach(function(t){var e=n[t];!1===e?i.removeAttribute(t):i.setAttribute(t,!0===e?"":e)}))})},effect:function(t){var i=t.state,o={popper:{position:i.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(i.elements.popper.style,o.popper),i.styles=o,i.elements.arrow&&Object.assign(i.elements.arrow.style,o.arrow),function(){Object.keys(i.elements).forEach(function(t){var e=i.elements[t],n=i.attributes[t]||{},t=Object.keys((i.styles.hasOwnProperty(t)?i.styles:o)[t]).reduce(function(t,e){return t[e]="",t},{});T(e)&&C(e)&&(Object.assign(e.style,t),Object.keys(n).forEach(function(t){e.removeAttribute(t)}))})}},requires:["computeStyles"]};function k(t){return t.split("-")[0]}var yi=Math.max,wi=Math.min,Ei=Math.round;function xi(){var t=navigator.userAgentData;return null!=t&&t.brands?t.brands.map(function(t){return t.brand+"/"+t.version}).join(" "):navigator.userAgent}function Ci(){return!/^((?!chrome|android).)*safari/i.test(xi())}function Ai(t,e,n){void 0===e&&(e=!1),void 0===n&&(n=!1);var i=t.getBoundingClientRect(),o=1,r=1;e&&T(t)&&(o=0l[C]&&(x=Vi(x)),Vi(x)),C=[];if(i&&C.push(A[w]<=0),o&&C.push(A[x]<=0,A[E]<=0),C.every(function(t){return t})){v=y,_=!1;break}u.set(y,C)}if(_)for(var T=g?3:1;0{const n=t.nodeName.toLowerCase();return 
e.includes(n)?!bo.has(n)||Boolean(yo.test(t.nodeValue)||wo.test(t.nodeValue)):e.filter(t=>t instanceof RegExp).some(t=>t.test(n))})(a,r)||s.removeAttribute(a.nodeName)}else s.remove()}return n.body.innerHTML}const xo={allowList:E,content:{},extraClass:"",html:!1,sanitize:!0,sanitizeFn:null,template:"
"},Co={allowList:"object",content:"object",extraClass:"(string|function)",html:"boolean",sanitize:"boolean",sanitizeFn:"(null|function)",template:"string"},Ao={entry:"(string|element|function|null)",selector:"(string|element)"};var To=class extends g{constructor(t){super(),this._config=this._getConfig(t)}static get Default(){return xo}static get DefaultType(){return Co}static get NAME(){return"TemplateFactory"}getContent(){return Object.values(this._config.content).map(t=>this._resolvePossibleFunction(t)).filter(Boolean)}hasContent(){return 0
',title:"",trigger:"hover focus"},Mo={allowList:"object",animation:"boolean",boundary:"(string|element)",container:"(string|element|boolean)",customClass:"(string|function)",delay:"(number|object)",fallbackPlacements:"array",html:"boolean",offset:"(array|string|function)",placement:"(string|function)",popperConfig:"(null|object|function)",sanitize:"boolean",sanitizeFn:"(null|function)",selector:"(string|boolean)",template:"string",title:"(string|element|function)",trigger:"string"};class Ho extends m{constructor(t,e){if(void 0===n)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t,e),this._isEnabled=!0,this._timeout=0,this._isHovered=null,this._activeTrigger={},this._popper=null,this._templateFactory=null,this._newContent=null,this.tip=null,this._setListeners(),this._config.selector||this._fixTitle()}static get Default(){return No}static get DefaultType(){return Mo}static get NAME(){return"tooltip"}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(){this._isEnabled&&(this._activeTrigger.click=!this._activeTrigger.click,this._isShown()?this._leave():this._enter())}dispose(){clearTimeout(this._timeout),f.off(this._element.closest(Io),ko,this._hideModalHandler),this._element.getAttribute("data-mdb-original-title")&&this._element.setAttribute("title",this._element.getAttribute("data-mdb-original-title")),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(this._isWithContent()&&this._isEnabled){var t=f.trigger(this._element,this.constructor.eventName("show")),e=(ft(this._element)||this._element.ownerDocument.documentElement).contains(this._element);if(!t.defaultPrevented&&e){this._disposePopper();t=this._getTipElement(),e=(this._element.setAttribute("aria-describedby",t.getAttribute("id")),this._config)["container"];if(this._element.ownerDocument.documentElement.contains(this.tip)||(e.append(t),f.trigger(this._element,this.constructor.eventName("inserted"))),this._popper=this._createPopper(t),t.classList.add(Lo),"ontouchstart"in document.documentElement)for(const n of[].concat(...document.body.children))f.on(n,"mouseover",pt);this._queueCallback(()=>{f.trigger(this._element,this.constructor.eventName("shown")),!1===this._isHovered&&this._leave(),this._isHovered=!1},this.tip,this._isAnimated())}}}hide(){if(this._isShown()){var t=f.trigger(this._element,this.constructor.eventName("hide"));if(!t.defaultPrevented){if(this._getTipElement().classList.remove(Lo),"ontouchstart"in document.documentElement)for(const e of[].concat(...document.body.children))f.off(e,"mouseover",pt);this._activeTrigger.click=!1,this._activeTrigger[jo]=!1,this._activeTrigger[Do]=!1,this._isHovered=null;this._queueCallback(()=>{this._isWithActiveTrigger()||(this._isHovered||this._disposePopper(),this._element.removeAttribute("aria-describedby"),f.trigger(this._element,this.constructor.eventName("hidden")))},this.tip,this._isAnimated())}}}update(){this._popper&&this._popper.update()}_isWithContent(){return Boolean(this._getTitle())}_getTipElement(){return this.tip||(this.tip=this._createTipElement(this._newContent||this._getContentForTemplate())),this.tip}_createTipElement(t){t=this._getTemplateFactory(t).toHtml();if(!t)return null;t.classList.remove(So,Lo),t.classList.add("bs-".concat(this.constructor.NAME,"-auto"));var e=(t=>{for(;t+=Math.floor(1e6*Math.random()),document.getElementById(t););return 
t})(this.constructor.NAME).toString();return t.setAttribute("id",e),this._isAnimated()&&t.classList.add(So),t}setContent(t){this._newContent=t,this._isShown()&&(this._disposePopper(),this.show())}_getTemplateFactory(t){return this._templateFactory?this._templateFactory.changeContent(t):this._templateFactory=new To({...this._config,content:t,extraClass:this._resolvePossibleFunction(this._config.customClass)}),this._templateFactory}_getContentForTemplate(){return{".tooltip-inner":this._getTitle()}}_getTitle(){return this._resolvePossibleFunction(this._config.title)||this._element.getAttribute("data-mdb-original-title")}_initializeOnDelegatedTarget(t){return this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_isAnimated(){return this._config.animation||this.tip&&this.tip.classList.contains(So)}_isShown(){return this.tip&&this.tip.classList.contains(Lo)}_createPopper(t){var e="function"==typeof this._config.placement?this._config.placement.call(this,t,this._element):this._config.placement,e=Po[e.toUpperCase()];return _o(this._element,t,this._getPopperConfig(e))}_getOffset(){const e=this._config["offset"];return"string"==typeof e?e.split(",").map(t=>Number.parseInt(t,10)):"function"==typeof e?t=>e(t,this._element):e}_resolvePossibleFunction(t){return"function"==typeof t?t.call(this._element):t}_getPopperConfig(t){t={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:".".concat(this.constructor.NAME,"-arrow")}},{name:"preSetPlacement",enabled:!0,phase:"beforeMain",fn:t=>{this._getTipElement().setAttribute("data-popper-placement",t.state.placement)}}]};return{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_setListeners(){var t,e;for(const n of this._config.trigger.split(" "))"click"===n?f.on(this._element,this.constructor.eventName("click"),this._config.selector,t=>{this._initializeOnDelegatedTarget(t).toggle()}):"manual"!==n&&(t=n===Do?this.constructor.eventName("mouseenter"):this.constructor.eventName("focusin"),e=n===Do?this.constructor.eventName("mouseleave"):this.constructor.eventName("focusout"),f.on(this._element,t,this._config.selector,t=>{var e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusin"===t.type?jo:Do]=!0,e._enter()}),f.on(this._element,e,this._config.selector,t=>{var e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusout"===t.type?jo:Do]=e._element.contains(t.relatedTarget),e._leave()}));this._hideModalHandler=()=>{this._element&&this.hide()},f.on(this._element.closest(Io),ko,this._hideModalHandler)}_fixTitle(){var t=this._element.getAttribute("title");t&&(this._element.getAttribute("aria-label")||this._element.textContent.trim()||this._element.setAttribute("aria-label",t),this._element.setAttribute("data-mdb-original-title",t),this._element.removeAttribute("title"))}_enter(){this._isShown()||this._isHovered?this._isHovered=!0:(this._isHovered=!0,this._setTimeout(()=>{this._isHovered&&this.show()},this._config.delay.show))}_leave(){this._isWithActiveTrigger()||(this._isHovered=!1,this._setTimeout(()=>{this._isHovered||this.hide()},this._config.delay.hide))}_setTimeout(t,e){clearTimeout(this._timeout),this._timeout=setTimeout(t,e)}_isWithActiveTrigger(){return Object.values(this._activeTrigger).includes(!0)}_getConfig(t){var e=p.getDataAttributes(this._element);for(const n 
of Object.keys(e))Oo.has(n)&&delete e[n];return t={...e,..."object"==typeof t&&t?t:{}},t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t.container=!1===t.container?document.body:ut(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),t}_getDelegateConfig(){var t={};for(const e in this._config)this.constructor.Default[e]!==this._config[e]&&(t[e]=this._config[e]);return t.selector=!1,t.trigger="manual",t}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null),this.tip&&(this.tip.remove(),this.tip=null)}static jQueryInterface(e){return this.each(function(){var t=Ho.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError('No method named "'.concat(e,'"'));t[e]()}})}}t(Ho);_=Ho;const Ro={..._.Default,content:"",offset:[0,8],placement:"right",template:'',trigger:"click"},Bo={..._.DefaultType,content:"(null|string|element|function)"};class Wo extends _{static get Default(){return Ro}static get DefaultType(){return Bo}static get NAME(){return"popover"}_isWithContent(){return this._getTitle()||this._getContent()}_getContentForTemplate(){return{".popover-header":this._getTitle(),".popover-body":this._getContent()}}_getContent(){return this._resolvePossibleFunction(this._config.content)}static jQueryInterface(e){return this.each(function(){var t=Wo.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError('No method named "'.concat(e,'"'));t[e]()}})}}t(Wo);v=Wo;const Fo="popover";w="mdb.".concat(Fo),g=".".concat(w);const qo="show.bs.popover",zo="shown.bs.popover",Qo="hide.bs.popover",Vo="hidden.bs.popover",Uo="inserted.bs.popover",Yo="show".concat(g),Xo="shown".concat(g),Ko="hide".concat(g),$o="hidden".concat(g),Go="inserted".concat(g);class Zo extends v{constructor(t,e){super(t,e),this._init()}dispose(){s.off(this.element,qo),s.off(this.element,zo),s.off(this.element,Qo),s.off(this.element,Vo),s.off(this.element,Uo),super.dispose()}static get NAME(){return Fo}_init(){this._bindShowEvent(),this._bindShownEvent(),this._bindHideEvent(),this._bindHiddenEvent(),this._bindInsertedEvent()}_bindShowEvent(){s.on(this.element,qo,()=>{s.trigger(this.element,Yo)})}_bindShownEvent(){s.on(this.element,zo,()=>{s.trigger(this.element,Xo)})}_bindHideEvent(){s.on(this.element,Qo,()=>{s.trigger(this.element,Ko)})}_bindHiddenEvent(){s.on(this.element,Vo,()=>{s.trigger(this.element,$o)})}_bindInsertedEvent(){s.on(this.element,Uo,()=>{s.trigger(this.element,Go)})}}a.find('[data-mdb-toggle="popover"]').forEach(t=>{var e=Zo.getInstance(t);e||new Zo(t)}),o(()=>{const t=i();if(t){const e=t.fn[Fo];t.fn[Fo]=Zo.jQueryInterface,t.fn[Fo].Constructor=Zo,t.fn[Fo].noConflict=()=>(t.fn[Fo]=e,Zo.jQueryInterface)}});var Jo=Zo;E=".".concat("bs.scrollspy");const tr="activate".concat(E),er="click".concat(E);"load".concat(E).concat(".data-api");const nr="active";const ir="[href]";w=".nav-link";const or="".concat(w,", ").concat(".nav-item"," > ").concat(w,", ").concat(".list-group-item"),rr={offset:null,rootMargin:"0px 0px -25%",smoothScroll:!1,target:null,threshold:[.1,.5,1]},sr={offset:"(number|null)",rootMargin:"string",smoothScroll:"boolean",target:"element",threshold:"array"};class ar extends m{constructor(t,e){super(t,e),this._config.target&&(this._targetLinks=new Map,this._observableSections=new 
Map,this._rootElement="visible"===getComputedStyle(this._element).overflowY?null:this._element,this._activeTarget=null,this._observer=null,this._previousScrollData={visibleEntryTop:0,parentScrollTop:0},this.refresh())}static get Default(){return rr}static get DefaultType(){return sr}static get NAME(){return"scrollspy"}refresh(){this._initializeTargetsAndObservables(),this._maybeEnableSmoothScroll(),this._observer?this._observer.disconnect():this._observer=this._getNewObserver();for(const t of this._observableSections.values())this._observer.observe(t)}dispose(){this._observer&&this._observer.disconnect(),super.dispose()}_configAfterMerge(t){return t.target=ut(t.target)||document.body,t.rootMargin=t.offset?"".concat(t.offset,"px 0px -30%"):t.rootMargin,"string"==typeof t.threshold&&(t.threshold=t.threshold.split(",").map(t=>Number.parseFloat(t))),t}_maybeEnableSmoothScroll(){this._config.smoothScroll&&(f.off(this._config.target,er),f.on(this._config.target,er,ir,t=>{var e=this._observableSections.get(t.target.hash);e&&(t.preventDefault(),t=this._rootElement||window,e=e.offsetTop-this._element.offsetTop,t.scrollTo?t.scrollTo({top:e,behavior:"smooth"}):t.scrollTop=e)}))}_getNewObserver(){var t={root:this._rootElement,threshold:this._config.threshold,rootMargin:this._config.rootMargin};return new IntersectionObserver(t=>this._observerCallback(t),t)}_observerCallback(t){const e=t=>this._targetLinks.get("#".concat(t.target.id));var n=t=>{this._previousScrollData.visibleEntryTop=t.target.offsetTop,this._process(e(t))},i=(this._rootElement||document.documentElement).scrollTop,o=i>=this._previousScrollData.parentScrollTop;this._previousScrollData.parentScrollTop=i;for(const s of t)if(s.isIntersecting){var r=s.target.offsetTop>=this._previousScrollData.visibleEntryTop;if(o&&r){if(n(s),i)continue;return}o||r||n(s)}else this._activeTarget=null,this._clearActiveClass(e(s))}_initializeTargetsAndObservables(){var t;this._targetLinks=new Map,this._observableSections=new Map;for(const e of y.find(ir,this._config.target))e.hash&&!dt(e)&&(t=y.findOne(e.hash,this._element),ht(t))&&(this._targetLinks.set(e.hash,e),this._observableSections.set(e.hash,t))}_process(t){this._activeTarget!==t&&(this._clearActiveClass(this._config.target),(this._activeTarget=t).classList.add(nr),this._activateParents(t),f.trigger(this._element,tr,{relatedTarget:t}))}_activateParents(t){if(t.classList.contains("dropdown-item"))y.findOne(".dropdown-toggle",t.closest(".dropdown")).classList.add(nr);else for(const e of y.parents(t,".nav, .list-group"))for(const n of y.prev(e,or))n.classList.add(nr)}_clearActiveClass(t){t.classList.remove(nr);for(const e of y.find("".concat(ir,".").concat(nr),t))e.classList.remove(nr)}static jQueryInterface(e){return this.each(function(){var t=ar.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError('No method named "'.concat(e,'"'));t[e]()}})}}t(ar);g=ar;const cr="scrollspy";v="mdb.".concat(cr),E=".".concat(v);const lr="activate.bs.scrollspy",ur="activate".concat(E);w="load".concat(E).concat(".data-api");const hr="collapsible-scrollspy";const dr=".".concat("active"),fr=".".concat(hr);class pr extends g{constructor(t,e){super(t,e),this._collapsibles=[],this._init()}dispose(){s.off(this._scrollElement,lr),super.dispose()}static get NAME(){return cr}_init(){this._bindActivateEvent(),this._getCollapsibles(),0!==this._collapsibles.length&&(this._showSubsection(),this._hideSubsection())}_getHeight(t){return 
t.offsetHeight}_hide(t){t=a.findOne("ul",t.parentNode);t.style.overflow="hidden",t.style.height="".concat(0,"px")}_show(t,e){t.style.height=e}_getCollapsibles(){var t=a.find(fr);t&&t.forEach(t=>{var e=t.parentNode,e=a.findOne("ul",e),n=e.offsetHeight;this._collapsibles.push({element:e,relatedTarget:t.getAttribute("href"),height:"".concat(n,"px")})})}_showSubsection(){a.find(dr).filter(t=>c.hasClass(t,hr)).forEach(e=>{var t=a.findOne("ul",e.parentNode),n=this._collapsibles.find(t=>t.relatedTarget=e.getAttribute("href")).height;this._show(t,n)})}_hideSubsection(){a.find(fr).filter(t=>!1===c.hasClass(t,"active")).forEach(t=>{this._hide(t)})}_bindActivateEvent(){s.on(this._element,lr,t=>{this._showSubsection(),this._hideSubsection(),s.trigger(this._element,ur,{relatedTarget:t.relatedTarget})})}}s.on(window,w,()=>{a.find('[data-mdb-spy="scroll"]').forEach(t=>{var e=pr.getInstance(t);e||new pr(t,c.getDataAttributes(t))})}),o(()=>{const t=i();if(t){const e=t.fn[cr];t.fn[cr]=pr.jQueryInterface,t.fn[cr].Constructor=pr,t.fn[cr].noConflict=()=>(t.fn[cr]=e,pr.jQueryInterface)}});var gr=pr;v=".".concat("bs.tab");const mr="hide".concat(v),_r="hidden".concat(v),vr="show".concat(v),br="shown".concat(v);E="click".concat(v);const yr="keydown".concat(v);g="load".concat(v);const wr="ArrowRight",Er="ArrowDown",xr="active",Cr="show";w=":not(.dropdown-toggle)";v=".nav-link".concat(w,", .list-group-item").concat(w,', [role="tab"]').concat(w),w='[data-mdb-toggle="tab"], [data-mdb-toggle="pill"], [data-mdb-toggle="list"]';const Ar="".concat(v,", ").concat(w),Tr=".".concat(xr,'[data-mdb-toggle="tab"], .').concat(xr,'[data-mdb-toggle="pill"], .').concat(xr,'[data-mdb-toggle="list"]');class Or extends m{constructor(t){super(t),this._parent=this._element.closest('.list-group, .nav, [role="tablist"]'),this._parent&&(this._setInitialAttributes(this._parent,this._getChildren()),f.on(this._element,yr,t=>this._keydown(t)))}static get NAME(){return"tab"}show(){var t,e,n=this._element;this._elemIsActive(n)||(e=(t=this._getActiveElem())?f.trigger(t,mr,{relatedTarget:n}):null,f.trigger(n,vr,{relatedTarget:t}).defaultPrevented)||e&&e.defaultPrevented||(this._deactivate(t,n),this._activate(n,t))}_activate(t,e){t&&(t.classList.add(xr),this._activate(l(t)),this._queueCallback(()=>{"tab"!==t.getAttribute("role")?t.classList.add(Cr):(t.removeAttribute("tabindex"),t.setAttribute("aria-selected",!0),this._toggleDropDown(t,!0),f.trigger(t,br,{relatedTarget:e}))},t,t.classList.contains("fade")))}_deactivate(t,e){t&&(t.classList.remove(xr),t.blur(),this._deactivate(l(t)),this._queueCallback(()=>{"tab"!==t.getAttribute("role")?t.classList.remove(Cr):(t.setAttribute("aria-selected",!1),t.setAttribute("tabindex","-1"),this._toggleDropDown(t,!1),f.trigger(t,_r,{relatedTarget:e}))},t,t.classList.contains("fade")))}_keydown(t){var e;["ArrowLeft",wr,"ArrowUp",Er].includes(t.key)&&(t.stopPropagation(),t.preventDefault(),e=[wr,Er].includes(t.key),t=bt(this._getChildren().filter(t=>!dt(t)),t.target,e,!0))&&(t.focus({preventScroll:!0}),Or.getOrCreateInstance(t).show())}_getChildren(){return y.find(Ar,this._parent)}_getActiveElem(){return this._getChildren().find(t=>this._elemIsActive(t))||null}_setInitialAttributes(t,e){this._setAttributeIfNotExists(t,"role","tablist");for(const n of e)this._setInitialAttributesOnChild(n)}_setInitialAttributesOnChild(t){t=this._getInnerElement(t);var 
e=this._elemIsActive(t),n=this._getOuterElement(t);t.setAttribute("aria-selected",e),n!==t&&this._setAttributeIfNotExists(n,"role","presentation"),e||t.setAttribute("tabindex","-1"),this._setAttributeIfNotExists(t,"role","tab"),this._setInitialAttributesOnTargetPanel(t)}_setInitialAttributesOnTargetPanel(t){var e=l(t);e&&(this._setAttributeIfNotExists(e,"role","tabpanel"),t.id)&&this._setAttributeIfNotExists(e,"aria-labelledby","#".concat(t.id))}_toggleDropDown(t,n){const i=this._getOuterElement(t);i.classList.contains("dropdown")&&((t=(t,e)=>{t=y.findOne(t,i);t&&t.classList.toggle(e,n)})(".dropdown-toggle",xr),t(".dropdown-menu",Cr),i.setAttribute("aria-expanded",n))}_setAttributeIfNotExists(t,e,n){t.hasAttribute(e)||t.setAttribute(e,n)}_elemIsActive(t){return t.classList.contains(xr)}_getInnerElement(t){return t.matches(Ar)?t:y.findOne(Ar,t)}_getOuterElement(t){return t.closest(".nav-item, .list-group-item")||t}static jQueryInterface(e){return this.each(function(){var t=Or.getOrCreateInstance(this);if("string"==typeof e){if(void 0===t[e]||e.startsWith("_")||"constructor"===e)throw new TypeError('No method named "'.concat(e,'"'));t[e]()}})}}f.on(document,E,w,function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),dt(this)||Or.getOrCreateInstance(this).show()}),f.on(window,g,()=>{for(const t of y.find(Tr))Or.getOrCreateInstance(t)}),t(Or);v=Or;const Sr="tab";E="mdb.".concat(Sr),w=".".concat(E);const Lr="show.bs.tab",Ir="shown.bs.tab",kr="show".concat(w),Dr="shown".concat(w),jr="hide".concat(w),Pr="hidden".concat(w);class Nr extends v{dispose(){s.off(this._element,Lr),s.off(this._element,Ir),super.dispose()}static get NAME(){return Sr}show(){var n=this._element;if(!this._elemIsActive(n)){var i=this._getActiveElem();let t=null,e=null;i&&(t=s.trigger(i,"hide.bs.tab",{relatedTarget:n}),e=s.trigger(i,jr,{relatedTarget:n}));var o=s.trigger(n,Lr,{relatedTarget:i}),r=s.trigger(n,kr,{relatedTarget:i});o.defaultPrevented&&r.defaultPrevented||t&&t.defaultPrevented&&e&&e.defaultPrevented||(this._deactivate(i,n),this._activate(n,i))}}_activate(t,e){t&&(t.classList.add("active"),this._activate(R(t)),this._queueCallback(()=>{"tab"!==t.getAttribute("role")?t.classList.add("show"):(t.focus(),t.removeAttribute("tabindex"),t.setAttribute("aria-selected",!0),this._toggleDropDown(t,!0),s.trigger(t,Ir,{relatedTarget:e}),s.trigger(t,Dr,{relatedTarget:e}))},t,t.classList.contains("fade")))}_deactivate(t,e){t&&(t.classList.remove("active"),t.blur(),this._deactivate(R(t)),this._queueCallback(()=>{"tab"!==t.getAttribute("role")?t.classList.remove("show"):(t.setAttribute("aria-selected",!1),t.setAttribute("tabindex","-1"),this._toggleDropDown(t,!1),s.trigger(t,"hidden.bs.tab",{relatedTarget:e}),s.trigger(t,Pr,{relatedTarget:e}))},t,t.classList.contains("fade")))}}a.find('[data-mdb-toggle="tab"], [data-mdb-toggle="pill"], [data-mdb-toggle="list"]').forEach(t=>{var e=Nr.getInstance(t);e||new Nr(t)}),o(()=>{const t=i();if(t){const e=t.fn.tab;t.fn.tab=Nr.jQueryInterface,t.fn.tab.Constructor=Nr,t.fn.tab.noConflict=()=>(t.fn.tab=e,Nr.jQueryInterface)}});var Mr=Nr;const Hr="tooltip";g="mdb.".concat(Hr),E=".".concat(g);const Rr="hide.bs.tooltip",Br="hidden.bs.tooltip",Wr="show.bs.tooltip",Fr="shown.bs.tooltip",qr="inserted.bs.tooltip",zr="hide".concat(E),Qr="hidden".concat(E),Vr="show".concat(E),Ur="shown".concat(E),Yr="inserted".concat(E);class Xr extends 
_{constructor(t,e){super(t,e),this._init()}dispose(){s.off(this._element,Wr),s.off(this._element,Fr),s.off(this._element,Rr),s.off(this._element,Br),s.off(this._element,qr),super.dispose()}static get NAME(){return Hr}_init(){this._bindShowEvent(),this._bindShownEvent(),this._bindHideEvent(),this._bindHiddenEvent(),this._bindHidePreventedEvent()}_bindShowEvent(){s.on(this.element,Wr,()=>{s.trigger(this.element,Vr)})}_bindShownEvent(){s.on(this.element,Fr,()=>{s.trigger(this.element,Ur)})}_bindHideEvent(){s.on(this.element,Rr,()=>{s.trigger(this.element,zr)})}_bindHiddenEvent(){s.on(this.element,Br,()=>{s.trigger(this.element,Qr)})}_bindHidePreventedEvent(){s.on(this.element,qr,()=>{s.trigger(this.element,Yr)})}}a.find('[data-mdb-toggle="tooltip"]').forEach(t=>{var e=Xr.getInstance(t);e||new Xr(t)}),o(()=>{const t=i();if(t){const e=t.fn[Hr];t.fn[Hr]=Xr.jQueryInterface,t.fn[Hr].Constructor=Xr,t.fn[Hr].noConflict=()=>(t.fn[Hr]=e,Xr.jQueryInterface)}});var Kr=Xr;w=".".concat("bs.toast");const $r="mouseover".concat(w),Gr="mouseout".concat(w),Zr="focusin".concat(w),Jr="focusout".concat(w),ts="hide".concat(w),es="hidden".concat(w),ns="show".concat(w),is="shown".concat(w),os="show",rs="showing",ss={animation:"boolean",autohide:"boolean",delay:"number"},as={animation:!0,autohide:!0,delay:5e3};class cs extends m{constructor(t,e){super(t,e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get Default(){return as}static get DefaultType(){return ss}static get NAME(){return"toast"}show(){f.trigger(this._element,ns).defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove("hide"),gt(this._element),this._element.classList.add(os,rs),this._queueCallback(()=>{this._element.classList.remove(rs),f.trigger(this._element,is),this._maybeScheduleHide()},this._element,this._config.animation))}hide(){this.isShown()&&!f.trigger(this._element,ts).defaultPrevented&&(this._element.classList.add(rs),this._queueCallback(()=>{this._element.classList.add("hide"),this._element.classList.remove(rs,os),f.trigger(this._element,es)},this._element,this._config.animation))}dispose(){this._clearTimeout(),this.isShown()&&this._element.classList.remove(os),super.dispose()}isShown(){return this._element.classList.contains(os)}_maybeScheduleHide(){!this._config.autohide||this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout(()=>{this.hide()},this._config.delay))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}e?this._clearTimeout():(t=t.relatedTarget,this._element===t||this._element.contains(t)||this._maybeScheduleHide())}_setListeners(){f.on(this._element,$r,t=>this._onInteraction(t,!0)),f.on(this._element,Gr,t=>this._onInteraction(t,!1)),f.on(this._element,Zr,t=>this._onInteraction(t,!0)),f.on(this._element,Jr,t=>this._onInteraction(t,!1))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(e){return this.each(function(){var t=cs.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError('No method named "'.concat(e,'"'));t[e](this)}})}}ve(cs),t(cs);v=cs;const ls="toast";g="mdb.".concat(ls),E=".".concat(g);const us="show.bs.toast",hs="shown.bs.toast",ds="hide.bs.toast",fs="hidden.bs.toast",ps="show".concat(E),gs="shown".concat(E),ms="hide".concat(E),_s="hidden".concat(E);class vs extends 
v{constructor(t,e){super(t,e),this._init()}dispose(){s.off(this._element,us),s.off(this._element,hs),s.off(this._element,ds),s.off(this._element,fs),super.dispose()}static get NAME(){return ls}_init(){this._bindShowEvent(),this._bindShownEvent(),this._bindHideEvent(),this._bindHiddenEvent()}_bindShowEvent(){s.on(this._element,us,()=>{s.trigger(this._element,ps)})}_bindShownEvent(){s.on(this._element,hs,()=>{s.trigger(this._element,gs)})}_bindHideEvent(){s.on(this._element,ds,()=>{s.trigger(this._element,ms)})}_bindHiddenEvent(){s.on(this._element,fs,()=>{s.trigger(this._element,_s)})}}a.find(".toast").forEach(t=>{var e=vs.getInstance(t);e||new vs(t)}),o(()=>{const t=i();if(t){const e=t.fn[ls];t.fn[ls]=vs.jQueryInterface,t.fn[ls].Constructor=vs,t.fn[ls].noConflict=()=>(t.fn[ls]=e,vs.jQueryInterface)}});var bs=vs;e(116);const ys="input",ws="mdb.input";_="form-outline";const Es="active",xs="form-notch",Cs="form-notch-leading",As="form-notch-middle";const Ts=".".concat(_," input"),Os=".".concat(_," textarea"),Ss=".".concat(xs),Ls=".".concat(Cs),Is=".".concat(As),ks=".".concat("form-helper");class j{constructor(t){this._element=t,this._label=null,this._labelWidth=0,this._labelMarginLeft=0,this._notchLeading=null,this._notchMiddle=null,this._notchTrailing=null,this._initiated=!1,this._helper=null,this._counter=!1,this._counterElement=null,this._maxLength=0,this._leadingIcon=null,this._element&&(r.setData(t,ws,this),this.init())}static get NAME(){return ys}get input(){return a.findOne("input",this._element)||a.findOne("textarea",this._element)}init(){this._initiated||(this._getLabelData(),this._applyDivs(),this._applyNotch(),this._activate(),this._getHelper(),this._getCounter(),this._initiated=!0)}update(){this._getLabelData(),this._getNotchData(),this._applyNotch(),this._activate(),this._getHelper(),this._getCounter()}forceActive(){c.addClass(this.input,Es)}forceInactive(){c.removeClass(this.input,Es)}dispose(){this._removeBorder(),r.removeData(this._element,ws),this._element=null}_getLabelData(){this._label=a.findOne("label",this._element),null===this._label?this._showPlaceholder():(this._getLabelWidth(),this._getLabelPositionInInputGroup(),this._toggleDefaultDatePlaceholder())}_getHelper(){this._helper=a.findOne(ks,this._element)}_getCounter(){this._counter=c.getDataAttribute(this.input,"showcounter"),this._counter&&(this._maxLength=this.input.maxLength,this._showCounter())}_showCounter(){var t;0{var t=this.input.value.length;this._counterElement.innerHTML="".concat(t," / ").concat(this._maxLength)})}_toggleDefaultDatePlaceholder(){var t=0{this._getElements(e);var t=e?e.target:this.input;""!==t.value&&c.addClass(t,Es),this._toggleDefaultDatePlaceholder(t)})}_getElements(t){var e;t&&(this._element=t.target.parentNode,this._label=a.findOne("label",this._element)),t&&this._label&&(e=this._labelWidth,this._getLabelData(),e!==this._labelWidth)&&(this._notchMiddle=a.findOne(".form-notch-middle",t.target.parentNode),this._notchLeading=a.findOne(Ls,t.target.parentNode),this._applyNotch())}_deactivate(t){t=t?t.target:this.input;""===t.value&&t.classList.remove(Es),this._toggleDefaultDatePlaceholder(t)}static activate(e){return function(t){e._activate(t)}}static deactivate(e){return function(t){e._deactivate(t)}}static jQueryInterface(n,i){return this.each(function(){let t=r.getData(this,ws);var e="object"==typeof n&&n;if((t||!/dispose/.test(n))&&(t=t||new j(this,e),"string"==typeof n)){if(void 0===t[n])throw new TypeError('No method named "'.concat(n,'"'));t[n](i)}})}static getInstance(t){return 
r.getData(t,ws)}static getOrCreateInstance(t){var e=1{a.find(Ts,t.target).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.update()}),a.find(Os,t.target).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.update()})}),s.on(window,"shown.bs.dropdown",t=>{t=t.target.parentNode.querySelector(".dropdown-menu");t&&(a.find(Ts,t).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.update()}),a.find(Os,t).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.update()}))}),s.on(window,"shown.bs.tab",t=>{let e;e=(t.target.href||c.getDataAttribute(t.target,"target")).split("#")[1];t=a.findOne("#".concat(e));a.find(Ts,t).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.update()}),a.find(Os,t).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.update()})}),a.find(".".concat(_)).map(t=>new j(t)),s.on(window,"reset",t=>{a.find(Ts,t.target).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.forceInactive()}),a.find(Os,t.target).forEach(t=>{t=j.getInstance(t.parentNode);t&&t.forceInactive()})}),s.on(window,"onautocomplete",t=>{var e=j.getInstance(t.target.parentNode);e&&t.cancelable&&e.forceActive()}),o(()=>{const t=i();if(t){const e=t.fn[ys];t.fn[ys]=j.jQueryInterface,t.fn[ys].Constructor=j,t.fn[ys].noConflict=()=>(t.fn[ys]=e,j.jQueryInterface)}});var Ds=j;w=".".concat("bs.collapse");const js="show".concat(w),Ps="shown".concat(w),Ns="hide".concat(w),Ms="hidden".concat(w);g="click".concat(w).concat(".data-api");const Hs="show",Rs="collapse",Bs="collapsing",Ws=":scope .".concat(Rs," .").concat(Rs),Fs='[data-mdb-toggle="collapse"]',qs={parent:null,toggle:!0},zs={parent:"(null|element)",toggle:"boolean"};class Qs extends m{constructor(t,e){super(t,e),this._isTransitioning=!1,this._triggerArray=[];for(const o of y.find(Fs)){var n=ct(o),i=y.find(n).filter(t=>t===this._element);null!==n&&i.length&&this._triggerArray.push(o)}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return qs}static get DefaultType(){return zs}static get NAME(){return"collapse"}toggle(){this._isShown()?this.hide():this.show()}show(){if(!this._isTransitioning&&!this._isShown()){let t=[];if(!(t=this._config.parent?this._getFirstLevelChildren(".collapse.show, .collapse.collapsing").filter(t=>t!==this._element).map(t=>Qs.getOrCreateInstance(t,{toggle:!1})):t).length||!t[0]._isTransitioning){var e=f.trigger(this._element,js);if(!e.defaultPrevented){for(const i of t)i.hide();const n=this._getDimension();this._element.classList.remove(Rs),this._element.classList.add(Bs),this._element.style[n]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;e=n[0].toUpperCase()+n.slice(1),e="scroll".concat(e);this._queueCallback(()=>{this._isTransitioning=!1,this._element.classList.remove(Bs),this._element.classList.add(Rs,Hs),this._element.style[n]="",f.trigger(this._element,Ps)},this._element,!0),this._element.style[n]="".concat(this._element[e],"px")}}}}hide(){if(!this._isTransitioning&&this._isShown()){var t=f.trigger(this._element,Ns);if(!t.defaultPrevented){t=this._getDimension();this._element.style[t]="".concat(this._element.getBoundingClientRect()[t],"px"),gt(this._element),this._element.classList.add(Bs),this._element.classList.remove(Rs,Hs);for(const n of this._triggerArray){var 
e=l(n);e&&!this._isShown(e)&&this._addAriaAndCollapsedClass([n],!1)}this._isTransitioning=!0;this._element.style[t]="",this._queueCallback(()=>{this._isTransitioning=!1,this._element.classList.remove(Bs),this._element.classList.add(Rs),f.trigger(this._element,Ms)},this._element,!0)}}}_isShown(){return(0!e.includes(t))}_addAriaAndCollapsedClass(t,e){if(t.length)for(const n of t)n.classList.toggle("collapsed",!e),n.setAttribute("aria-expanded",e)}static jQueryInterface(e){const n={};return"string"==typeof e&&/show|hide/.test(e)&&(n.toggle=!1),this.each(function(){var t=Qs.getOrCreateInstance(this,n);if("string"==typeof e){if(void 0===t[e])throw new TypeError('No method named "'.concat(e,'"'));t[e]()}})}}f.on(document,g,Fs,function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();t=ct(this);for(const e of y.find(t))Qs.getOrCreateInstance(e,{toggle:!1}).toggle()}),t(Qs);E=Qs;const Vs="collapse";v="mdb.".concat(Vs),e=".".concat(v);const Us="show.bs.collapse",Ys="shown.bs.collapse",Xs="hide.bs.collapse",Ks="hidden.bs.collapse",$s="show".concat(e),Gs="shown".concat(e),Zs="hide".concat(e),Js="hidden".concat(e);class ta extends E{constructor(t){super(t,1{s.trigger(this._element,$s)})}_bindShownEvent(){s.on(this._element,Ys,()=>{s.trigger(this._element,Gs)})}_bindHideEvent(){s.on(this._element,Xs,()=>{s.trigger(this._element,Zs)})}_bindHiddenEvent(){s.on(this._element,Ks,()=>{s.trigger(this._element,Js)})}}a.find('[data-mdb-toggle="collapse"]').forEach(t=>{var e=ta.getInstance(t);e||new ta(t,{toggle:!1})}),o(()=>{const t=i();if(t){const e=t.fn[Vs];t.fn[Vs]=ta.jQueryInterface,t.fn[Vs].Constructor=ta,t.fn[Vs].noConflict=()=>(t.fn[Vs]=e,ta.jQueryInterface)}});var ea=ta;const na="dropdown";_=".".concat("bs.dropdown"),w=".data-api";const ia="ArrowDown",oa="hide".concat(_),ra="hidden".concat(_),sa="show".concat(_),aa="shown".concat(_);g="click".concat(_).concat(w),v="keydown".concat(_).concat(w),e="keyup".concat(_).concat(w);const ca="show",la='[data-mdb-toggle="dropdown"]:not(.disabled):not(:disabled)',ua="".concat(la,".").concat(ca),ha=".dropdown-menu",da=h()?"top-end":"top-start",fa=h()?"top-start":"top-end",pa=h()?"bottom-end":"bottom-start",ga=h()?"bottom-start":"bottom-end",ma=h()?"left-start":"right-start",_a=h()?"right-start":"left-start",va={autoClose:!0,boundary:"clippingParents",display:"dynamic",offset:[0,2],popperConfig:null,reference:"toggle"},ba={autoClose:"(boolean|string)",boundary:"(string|element)",display:"string",offset:"(array|string|function)",popperConfig:"(null|object|function)",reference:"(string|element|object)"};class P extends m{constructor(t,e){super(t,e),this._popper=null,this._parent=this._element.parentNode,this._menu=y.next(this._element,ha)[0]||y.prev(this._element,ha)[0]||y.findOne(ha,this._parent),this._inNavbar=this._detectNavbar()}static get Default(){return va}static get DefaultType(){return ba}static get NAME(){return na}toggle(){return this._isShown()?this.hide():this.show()}show(){if(!dt(this._element)&&!this._isShown()){var t={relatedTarget:this._element},e=f.trigger(this._element,sa,t);if(!e.defaultPrevented){if(this._createPopper(),"ontouchstart"in document.documentElement&&!this._parent.closest(".navbar-nav"))for(const n of[].concat(...document.body.children))f.on(n,"mouseover",pt);this._element.focus(),this._element.setAttribute("aria-expanded",!0),this._menu.classList.add(ca),this._element.classList.add(ca),f.trigger(this._element,aa,t)}}}hide(){var 
t;!dt(this._element)&&this._isShown()&&(t={relatedTarget:this._element},this._completeHide(t))}dispose(){this._popper&&this._popper.destroy(),super.dispose()}update(){this._inNavbar=this._detectNavbar(),this._popper&&this._popper.update()}_completeHide(t){var e=f.trigger(this._element,oa,t);if(!e.defaultPrevented){if("ontouchstart"in document.documentElement)for(const n of[].concat(...document.body.children))f.off(n,"mouseover",pt);this._popper&&this._popper.destroy(),this._menu.classList.remove(ca),this._element.classList.remove(ca),this._element.setAttribute("aria-expanded","false"),p.removeDataAttribute(this._menu,"popper"),f.trigger(this._element,ra,t)}}_getConfig(t){if("object"!=typeof(t=super._getConfig(t)).reference||u(t.reference)||"function"==typeof t.reference.getBoundingClientRect)return t;throw new TypeError("".concat(na.toUpperCase(),': Option "reference" provided type "object" without a required "getBoundingClientRect" method.'))}_createPopper(){if(void 0===n)throw new TypeError("Bootstrap's dropdowns require Popper (https://popper.js.org)");let t=this._element;"parent"===this._config.reference?t=this._parent:u(this._config.reference)?t=ut(this._config.reference):"object"==typeof this._config.reference&&(t=this._config.reference);var e=this._getPopperConfig();this._popper=_o(t,this._menu,e)}_isShown(){return this._menu.classList.contains(ca)}_getPlacement(){var t,e=this._parent;return e.classList.contains("dropend")?ma:e.classList.contains("dropstart")?_a:e.classList.contains("dropup-center")?"top":e.classList.contains("dropdown-center")?"bottom":(t="end"===getComputedStyle(this._menu).getPropertyValue("--bs-position").trim(),e.classList.contains("dropup")?t?fa:da:t?ga:pa)}_detectNavbar(){return null!==this._element.closest(".navbar")}_getOffset(){const e=this._config["offset"];return"string"==typeof e?e.split(",").map(t=>Number.parseInt(t,10)):"function"==typeof e?t=>e(t,this._element):e}_getPopperConfig(){var t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return!this._inNavbar&&"static"!==this._config.display||(p.setDataAttribute(this._menu,"popper","static"),t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,..."function"==typeof this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_selectMenuItem(t){var{key:t,target:e}=t,n=y.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter(t=>ht(t));n.length&&bt(n,e,t===ia,!n.includes(e)).focus()}static jQueryInterface(e){return this.each(function(){var t=P.getOrCreateInstance(this,e);if("string"==typeof e){if(void 0===t[e])throw new TypeError('No method named "'.concat(e,'"'));t[e]()}})}static clearMenus(t){if(2!==t.button&&("keyup"!==t.type||"Tab"===t.key))for(const o of y.find(ua)){var e,n,i=P.getInstance(o);i&&!1!==i._config.autoClose&&(e=(n=t.composedPath()).includes(i._menu),n.includes(i._element)||"inside"===i._config.autoClose&&!e||"outside"===i._config.autoClose&&e||i._menu.contains(t.target)&&("keyup"===t.type&&"Tab"===t.key||/input|select|option|textarea|form/i.test(t.target.tagName))||(n={relatedTarget:i._element},"click"===t.type&&(n.clickEvent=t),i._completeHide(n)))}}static dataApiKeydownHandler(t){var 
e=/input|textarea/i.test(t.target.tagName),n="Escape"===t.key,i=["ArrowUp",ia].includes(t.key);!i&&!n||e&&!n||(t.preventDefault(),e=this.matches(la)?this:y.prev(this,la)[0]||y.next(this,la)[0]||y.findOne(la,t.delegateTarget.parentNode),n=P.getOrCreateInstance(e),i?(t.stopPropagation(),n.show(),n._selectMenuItem(t)):n._isShown()&&(t.stopPropagation(),n.hide(),e.focus()))}}f.on(document,v,la,P.dataApiKeydownHandler),f.on(document,v,ha,P.dataApiKeydownHandler),f.on(document,g,P.clearMenus),f.on(document,e,P.clearMenus),f.on(document,g,la,function(t){t.preventDefault(),P.getOrCreateInstance(this).toggle()}),t(P);E=P;const ya="dropdown";_="mdb.".concat(ya),w=".".concat(_);const wa={offset:[0,2],flip:!0,boundary:"clippingParents",reference:"toggle",display:"dynamic",popperConfig:null,dropdownAnimation:"on"},Ea={offset:"(array|string|function)",flip:"boolean",boundary:"(string|element)",reference:"(string|element|object)",display:"string",popperConfig:"(null|object|function)",dropdownAnimation:"string"},xa="hide.bs.dropdown",Ca="hidden.bs.dropdown",Aa="show.bs.dropdown",Ta="shown.bs.dropdown",Oa="hide".concat(w),Sa="hidden".concat(w),La="show".concat(w),Ia="shown".concat(w),ka="animation",Da="fade-in",ja="fade-out";class Pa extends E{constructor(t,e){super(t,e),this._config=this._getConfig(e),this._menuStyle="",this._popperPlacement="",this._mdbPopperConfig="";t=window.matchMedia("(prefers-reduced-motion: reduce)").matches;"on"!==this._config.dropdownAnimation||t||this._init()}dispose(){s.off(this._element,Aa),s.off(this._parent,Ta),s.off(this._parent,xa),s.off(this._parent,Ca),super.dispose()}static get NAME(){return ya}_init(){this._bindShowEvent(),this._bindShownEvent(),this._bindHideEvent(),this._bindHiddenEvent()}_getConfig(t){t={...wa,...c.getDataAttributes(this._element),...t};return B(ya,t,Ea),t}_getOffset(){const e=this._config["offset"];return"string"==typeof e?e.split(",").map(t=>Number.parseInt(t,10)):"function"==typeof e?t=>e(t,this._element):e}_getPopperConfig(){var t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{altBoundary:this._config.flip,boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return"static"===this._config.display&&(t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,..."function"==typeof 
this._config.popperConfig?this._config.popperConfig(t):this._config.popperConfig}}_bindShowEvent(){s.on(this._element,Aa,t=>{s.trigger(this._element,La,{relatedTarget:t.relatedTarget}).defaultPrevented?t.preventDefault():this._dropdownAnimationStart("show")})}_bindShownEvent(){s.on(this._parent,Ta,t=>{s.trigger(this._parent,Ia,{relatedTarget:t.relatedTarget}).defaultPrevented&&t.preventDefault()})}_bindHideEvent(){s.on(this._parent,xa,t=>{s.trigger(this._parent,Oa,{relatedTarget:t.relatedTarget}).defaultPrevented?t.preventDefault():(this._menuStyle=this._menu.style.cssText,this._popperPlacement=this._menu.getAttribute("data-popper-placement"),this._mdbPopperConfig=this._menu.getAttribute("data-mdb-popper"))})}_bindHiddenEvent(){s.on(this._parent,Ca,t=>{s.trigger(this._parent,Sa,{relatedTarget:t.relatedTarget}).defaultPrevented?t.preventDefault():("static"!==this._config.display&&""!==this._menuStyle&&(this._menu.style.cssText=this._menuStyle),this._menu.setAttribute("data-popper-placement",this._popperPlacement),this._menu.setAttribute("data-mdb-popper",this._mdbPopperConfig),this._dropdownAnimationStart("hide"))})}_dropdownAnimationStart(t){"show"===t?(this._menu.classList.add(ka,Da),this._menu.classList.remove(ja)):(this._menu.classList.add(ka,ja),this._menu.classList.remove(Da)),this._bindAnimationEnd()}_bindAnimationEnd(){s.one(this._menu,"animationend",()=>{this._menu.classList.remove(ka,ja,Da)})}}a.find('[data-mdb-toggle="dropdown"]').forEach(t=>{var e=Pa.getInstance(t);e||new Pa(t)}),o(()=>{const t=i();if(t){const e=t.fn[ya];t.fn[ya]=Pa.jQueryInterface,t.fn[ya].Constructor=Pa,t.fn[ya].noConflict=()=>(t.fn[ya]=e,Pa.jQueryInterface)}});var Na=Pa;const Ma="ripple",Ha="mdb.ripple",N="ripple-surface",Ra="ripple-wave",Ba="input-wrapper",Wa=[".btn",".ripple"],Fa="ripple-surface-unbound",qa=[0,0,0],za=["primary","secondary","success","danger","warning","info","light","dark"],Qa={rippleCentered:!1,rippleColor:"",rippleDuration:"500ms",rippleRadius:0,rippleUnbound:!1},Va={rippleCentered:"boolean",rippleColor:"string",rippleDuration:"string",rippleRadius:"number",rippleUnbound:"boolean"};class Ua{constructor(t,e){this._element=t,this._options=this._getConfig(e),this._element&&(r.setData(t,Ha,this),c.addClass(this._element,N)),this._clickHandler=this._createRipple.bind(this),this._rippleTimer=null,this._isMinWidthSet=!1,this._rippleInSpan=!1,this.init()}static get NAME(){return Ma}init(){this._addClickEvent(this._element)}dispose(){r.removeData(this._element,Ha),s.off(this._element,"click",this._clickHandler),this._element=null,this._options=null}_autoInit(e){if(Wa.forEach(t=>{a.closest(e.target,t)&&(this._element=a.closest(e.target,t))}),this._options=this._getConfig(),"input"===this._element.tagName.toLowerCase()){var t=this._element.parentNode;if(this._rippleInSpan=!0,"span"===t.tagName.toLowerCase()&&t.classList.contains(N))this._element=t;else{var n=getComputedStyle(this._element).boxShadow;const o=this._element;var 
i=document.createElement("span");o.classList.contains("btn-block")&&(i.style.display="block"),s.one(i,"mouseup",t=>{0===t.button&&o.click()}),i.classList.add(N,Ba),c.addStyle(i,{border:0,"box-shadow":n}),t.replaceChild(i,this._element),i.appendChild(this._element),this._element=i}this._element.focus()}this._element.style.minWidth||(c.style(this._element,{"min-width":"".concat(getComputedStyle(this._element).width)}),this._isMinWidthSet=!0),c.addClass(this._element,N),this._createRipple(e)}_addClickEvent(t){s.on(t,"mousedown",this._clickHandler)}_getEventLayer(t){return{layerX:Math.round(t.clientX-t.target.getBoundingClientRect().x),layerY:Math.round(t.clientY-t.target.getBoundingClientRect().y)}}_createRipple(t){c.hasClass(this._element,N)||c.addClass(this._element,N);var{layerX:t,layerY:e}=this._getEventLayer(t),n=this._element.offsetHeight,i=this._element.offsetWidth,o=this._durationToMsNumber(this._options.rippleDuration),r={offsetX:this._options.rippleCentered?n/2:t,offsetY:this._options.rippleCentered?i/2:e,height:n,width:i},r=this._getDiameter(r),s=this._options.rippleRadius||r/2,a={delay:.5*o,duration:o-.5*o},i={left:this._options.rippleCentered?"".concat(i/2-s,"px"):"".concat(t-s,"px"),top:this._options.rippleCentered?"".concat(n/2-s,"px"):"".concat(e-s,"px"),height:"".concat(2*this._options.rippleRadius||r,"px"),width:"".concat(2*this._options.rippleRadius||r,"px"),transitionDelay:"0s, ".concat(a.delay,"ms"),transitionDuration:"".concat(o,"ms, ").concat(a.duration,"ms")},t=W("div");this._createHTMLRipple({wrapper:this._element,ripple:t,styles:i}),this._removeHTMLRipple({ripple:t,duration:o})}_createHTMLRipple(t){let{wrapper:e,ripple:n,styles:i}=t;Object.keys(i).forEach(t=>n.style[t]=i[t]),n.classList.add(Ra),""!==this._options.rippleColor&&(this._removeOldColorClasses(e),this._addColor(n,e)),this._toggleUnbound(e),this._appendRipple(n,e)}_removeHTMLRipple(t){let{ripple:e,duration:n}=t;this._rippleTimer&&(clearTimeout(this._rippleTimer),this._rippleTimer=null),this._rippleTimer=setTimeout(()=>{e&&(e.remove(),this._element)&&(a.find(".".concat(Ra),this._element).forEach(t=>{t.remove()}),this._isMinWidthSet&&(c.style(this._element,{"min-width":""}),this._isMinWidthSet=!1),this._rippleInSpan&&this._element.classList.contains(Ba)?this._removeWrapperSpan():c.removeClass(this._element,N))},n)}_removeWrapperSpan(){var t=this._element.firstChild;this._element.replaceWith(t),this._element=t,this._element.focus(),this._rippleInSpan=!1}_durationToMsNumber(t){return Number(t.replace("ms","").replace("s","000"))}_getConfig(){var t=0Math.sqrt(t**2+e**2),a=e===n/2&&t===i/2;const c=!0==o&&!1==r,l=!0==o&&!0==r,u=!1==o&&!0==r,h=!1==o&&!1==r;o={topLeft:s(t,e),topRight:s(i-t,e),bottomLeft:s(t,n-e),bottomRight:s(i-t,n-e)};let d=0;return a||h?d=o.topLeft:u?d=o.topRight:l?d=o.bottomRight:c&&(d=o.bottomLeft),2*d}_appendRipple(t,e){e.appendChild(t),setTimeout(()=>{c.addClass(t,"active")},50)}_toggleUnbound(t){!0===this._options.rippleUnbound?c.addClass(t,Fa):t.classList.remove(Fa)}_addColor(t,e){za.find(t=>t===this._options.rippleColor.toLowerCase())?c.addClass(e,"".concat(N,"-").concat(this._options.rippleColor.toLowerCase())):(e=this._colorToRGB(this._options.rippleColor).join(","),e="rgba({{color}}, 0.2) 0, rgba({{color}}, 0.3) 40%, rgba({{color}}, 0.4) 50%, rgba({{color}}, 0.5) 60%, rgba({{color}}, 0) 70%".split("{{color}}").join("".concat(e)),t.style.backgroundImage="radial-gradient(circle, ".concat(e,")"))}_removeOldColorClasses(e){var t=new 
RegExp("".concat(N,"-[a-z]+"),"gi");(e.classList.value.match(t)||[]).forEach(t=>{e.classList.remove(t)})}_colorToRGB(t){var e,n,i;return"transparent"===t.toLowerCase()?qa:"#"===t[0]?((e=t).length<7&&(e="#".concat(e[1]).concat(e[1]).concat(e[2]).concat(e[2]).concat(e[3]).concat(e[3])),[parseInt(e.substr(1,2),16),parseInt(e.substr(3,2),16),parseInt(e.substr(5,2),16)]):(-1===t.indexOf("rgb")&&(e=t,n=document.body.appendChild(document.createElement("fictum")),i="rgb(1, 2, 3)",n.style.color=i,t=n.style.color!==i||(n.style.color=e,n.style.color===i)||""===n.style.color?qa:(e=getComputedStyle(n).color,document.body.removeChild(n),e)),0===t.indexOf("rgb")?((i=(i=t).match(/[.\d]+/g).map(t=>+Number(t))).length=3,i):qa)}static autoInitial(e){return function(t){e._autoInit(t)}}static jQueryInterface(t){return this.each(function(){return r.getData(this,Ha)?null:new Ua(this,t)})}static getInstance(t){return r.getData(t,Ha)}static getOrCreateInstance(t){var e=1{s.one(document,"mousedown",t,Ua.autoInitial(new Ua))}),o(()=>{const t=i();if(t){const e=t.fn[Ma];t.fn[Ma]=Ua.jQueryInterface,t.fn[Ma].Constructor=Ua,t.fn[Ma].noConflict=()=>(t.fn[Ma]=e,Ua.jQueryInterface)}});var Ya=Ua;const Xa="range",Ka="mdb.range";const $a="thumb-active";const Ga=".".concat("thumb-value"),Za=".".concat("thumb");m=".".concat("range");class Ja{constructor(t){this._element=t,this._initiated=!1,this._thumb=null,this._element&&(r.setData(t,Ka,this),this.init())}static get NAME(){return Xa}get rangeInput(){return a.findOne("input[type=range]",this._element)}init(){this._initiated||(this._addThumb(),this._thumbUpdate(),this._handleEvents(),this._initiated=!0)}dispose(){this._disposeEvents(),r.removeData(this._element,Ka),this._element=null,this._thumb=null}_addThumb(){var t=W("span");c.addClass(t,"thumb"),t.innerHTML='',this._element.append(t),this._thumb=a.findOne(Za,this._element)}_handleEvents(){s.on(this.rangeInput,"mousedown",()=>this._showThumb()),s.on(this.rangeInput,"mouseup",()=>this._hideThumb()),s.on(this.rangeInput,"touchstart",()=>this._showThumb()),s.on(this.rangeInput,"touchend",()=>this._hideThumb()),s.on(this.rangeInput,"input",()=>this._thumbUpdate())}_disposeEvents(){s.off(this.rangeInput,"mousedown",this._showThumb),s.off(this.rangeInput,"mouseup",this._hideThumb),s.off(this.rangeInput,"touchstart",this._showThumb),s.off(this.rangeInput,"touchend",this._hideThumb),s.off(this.rangeInput,"input",this._thumbUpdate)}_showThumb(){c.addClass(this._thumb,$a)}_hideThumb(){c.removeClass(this._thumb,$a)}_thumbUpdate(){var t=this.rangeInput,e=t.value,n=t.min||0,t=t.max||100,e=(a.findOne(Ga,this._thumb).textContent=e,Number(100*(e-n)/(t-n)));c.style(this._thumb,{left:"calc(".concat(e,"% + (").concat(8-.15*e,"px))")})}static getInstance(t){return r.getData(t,Ka)}static getOrCreateInstance(t){var e=1new Ja(t)),o(()=>{const t=i();if(t){const e=t.fn[Xa];t.fn[Xa]=Ja.jQueryInterface,t.fn[Xa].Constructor=Ja,t.fn[Xa].noConflict=()=>(t.fn[Xa]=e,Ja.jQueryInterface)}});var tc=Ja}],i={},o.m=n,o.c=i,o.d=function(t,e,n){o.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},o.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},o.t=function(e,t){if(1&t&&(e=o(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(o.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)o.d(n,i,function(t){return e[t]}.bind(null,i));return 
n},o.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return o.d(e,"a",e),e},o.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},o.p="",o(o.s=119);function o(t){var e;return(i[t]||(e=i[t]={i:t,l:!1,exports:{}},n[t].call(e.exports,e,e.exports,o),e.l=!0,e)).exports}var n,i}); +//# sourceMappingURL=mdb.min.js.map \ No newline at end of file diff --git a/custom/assets/javascripts/vue.global.js b/custom/assets/javascripts/vue.global.js new file mode 100644 index 0000000..1db98bb --- /dev/null +++ b/custom/assets/javascripts/vue.global.js @@ -0,0 +1,15361 @@ +var Vue = (function (exports) { + 'use strict'; + + function makeMap(str, expectsLowerCase) { + const map = /* @__PURE__ */ Object.create(null); + const list = str.split(","); + for (let i = 0; i < list.length; i++) { + map[list[i]] = true; + } + return expectsLowerCase ? (val) => !!map[val.toLowerCase()] : (val) => !!map[val]; + } + + const EMPTY_OBJ = Object.freeze({}) ; + const EMPTY_ARR = Object.freeze([]) ; + const NOOP = () => { + }; + const NO = () => false; + const onRE = /^on[^a-z]/; + const isOn = (key) => onRE.test(key); + const isModelListener = (key) => key.startsWith("onUpdate:"); + const extend = Object.assign; + const remove = (arr, el) => { + const i = arr.indexOf(el); + if (i > -1) { + arr.splice(i, 1); + } + }; + const hasOwnProperty$1 = Object.prototype.hasOwnProperty; + const hasOwn = (val, key) => hasOwnProperty$1.call(val, key); + const isArray = Array.isArray; + const isMap = (val) => toTypeString(val) === "[object Map]"; + const isSet = (val) => toTypeString(val) === "[object Set]"; + const isDate = (val) => toTypeString(val) === "[object Date]"; + const isRegExp = (val) => toTypeString(val) === "[object RegExp]"; + const isFunction = (val) => typeof val === "function"; + const isString = (val) => typeof val === "string"; + const isSymbol = (val) => typeof val === "symbol"; + const isObject = (val) => val !== null && typeof val === "object"; + const isPromise = (val) => { + return isObject(val) && isFunction(val.then) && isFunction(val.catch); + }; + const objectToString = Object.prototype.toString; + const toTypeString = (value) => objectToString.call(value); + const toRawType = (value) => { + return toTypeString(value).slice(8, -1); + }; + const isPlainObject = (val) => toTypeString(val) === "[object Object]"; + const isIntegerKey = (key) => isString(key) && key !== "NaN" && key[0] !== "-" && "" + parseInt(key, 10) === key; + const isReservedProp = /* @__PURE__ */ makeMap( + // the leading comma is intentional so empty string "" is also included + ",key,ref,ref_for,ref_key,onVnodeBeforeMount,onVnodeMounted,onVnodeBeforeUpdate,onVnodeUpdated,onVnodeBeforeUnmount,onVnodeUnmounted" + ); + const isBuiltInDirective = /* @__PURE__ */ makeMap( + "bind,cloak,else-if,else,for,html,if,model,on,once,pre,show,slot,text,memo" + ); + const cacheStringFunction = (fn) => { + const cache = /* @__PURE__ */ Object.create(null); + return (str) => { + const hit = cache[str]; + return hit || (cache[str] = fn(str)); + }; + }; + const camelizeRE = /-(\w)/g; + const camelize = cacheStringFunction((str) => { + return str.replace(camelizeRE, (_, c) => c ? c.toUpperCase() : ""); + }); + const hyphenateRE = /\B([A-Z])/g; + const hyphenate = cacheStringFunction( + (str) => str.replace(hyphenateRE, "-$1").toLowerCase() + ); + const capitalize = cacheStringFunction( + (str) => str.charAt(0).toUpperCase() + str.slice(1) + ); + const toHandlerKey = cacheStringFunction( + (str) => str ? 
`on${capitalize(str)}` : `` + ); + const hasChanged = (value, oldValue) => !Object.is(value, oldValue); + const invokeArrayFns = (fns, arg) => { + for (let i = 0; i < fns.length; i++) { + fns[i](arg); + } + }; + const def = (obj, key, value) => { + Object.defineProperty(obj, key, { + configurable: true, + enumerable: false, + value + }); + }; + const looseToNumber = (val) => { + const n = parseFloat(val); + return isNaN(n) ? val : n; + }; + const toNumber = (val) => { + const n = isString(val) ? Number(val) : NaN; + return isNaN(n) ? val : n; + }; + let _globalThis; + const getGlobalThis = () => { + return _globalThis || (_globalThis = typeof globalThis !== "undefined" ? globalThis : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : typeof global !== "undefined" ? global : {}); + }; + + const PatchFlagNames = { + [1]: `TEXT`, + [2]: `CLASS`, + [4]: `STYLE`, + [8]: `PROPS`, + [16]: `FULL_PROPS`, + [32]: `HYDRATE_EVENTS`, + [64]: `STABLE_FRAGMENT`, + [128]: `KEYED_FRAGMENT`, + [256]: `UNKEYED_FRAGMENT`, + [512]: `NEED_PATCH`, + [1024]: `DYNAMIC_SLOTS`, + [2048]: `DEV_ROOT_FRAGMENT`, + [-1]: `HOISTED`, + [-2]: `BAIL` + }; + + const slotFlagsText = { + [1]: "STABLE", + [2]: "DYNAMIC", + [3]: "FORWARDED" + }; + + const GLOBALS_WHITE_LISTED = "Infinity,undefined,NaN,isFinite,isNaN,parseFloat,parseInt,decodeURI,decodeURIComponent,encodeURI,encodeURIComponent,Math,Number,Date,Array,Object,Boolean,String,RegExp,Map,Set,JSON,Intl,BigInt,console"; + const isGloballyWhitelisted = /* @__PURE__ */ makeMap(GLOBALS_WHITE_LISTED); + + const range = 2; + function generateCodeFrame(source, start = 0, end = source.length) { + let lines = source.split(/(\r?\n)/); + const newlineSequences = lines.filter((_, idx) => idx % 2 === 1); + lines = lines.filter((_, idx) => idx % 2 === 0); + let count = 0; + const res = []; + for (let i = 0; i < lines.length; i++) { + count += lines[i].length + (newlineSequences[i] && newlineSequences[i].length || 0); + if (count >= start) { + for (let j = i - range; j <= i + range || end > count; j++) { + if (j < 0 || j >= lines.length) + continue; + const line = j + 1; + res.push( + `${line}${" ".repeat(Math.max(3 - String(line).length, 0))}| ${lines[j]}` + ); + const lineLength = lines[j].length; + const newLineSeqLength = newlineSequences[j] && newlineSequences[j].length || 0; + if (j === i) { + const pad = start - (count - (lineLength + newLineSeqLength)); + const length = Math.max( + 1, + end > count ? lineLength - pad : end - start + ); + res.push(` | ` + " ".repeat(pad) + "^".repeat(length)); + } else if (j > i) { + if (end > count) { + const length = Math.max(Math.min(end - count, lineLength), 1); + res.push(` | ` + "^".repeat(length)); + } + count += lineLength + newLineSeqLength; + } + } + break; + } + } + return res.join("\n"); + } + + function normalizeStyle(value) { + if (isArray(value)) { + const res = {}; + for (let i = 0; i < value.length; i++) { + const item = value[i]; + const normalized = isString(item) ? 
parseStringStyle(item) : normalizeStyle(item); + if (normalized) { + for (const key in normalized) { + res[key] = normalized[key]; + } + } + } + return res; + } else if (isString(value)) { + return value; + } else if (isObject(value)) { + return value; + } + } + const listDelimiterRE = /;(?![^(]*\))/g; + const propertyDelimiterRE = /:([^]+)/; + const styleCommentRE = /\/\*[^]*?\*\//g; + function parseStringStyle(cssText) { + const ret = {}; + cssText.replace(styleCommentRE, "").split(listDelimiterRE).forEach((item) => { + if (item) { + const tmp = item.split(propertyDelimiterRE); + tmp.length > 1 && (ret[tmp[0].trim()] = tmp[1].trim()); + } + }); + return ret; + } + function normalizeClass(value) { + let res = ""; + if (isString(value)) { + res = value; + } else if (isArray(value)) { + for (let i = 0; i < value.length; i++) { + const normalized = normalizeClass(value[i]); + if (normalized) { + res += normalized + " "; + } + } + } else if (isObject(value)) { + for (const name in value) { + if (value[name]) { + res += name + " "; + } + } + } + return res.trim(); + } + function normalizeProps(props) { + if (!props) + return null; + let { class: klass, style } = props; + if (klass && !isString(klass)) { + props.class = normalizeClass(klass); + } + if (style) { + props.style = normalizeStyle(style); + } + return props; + } + + const HTML_TAGS = "html,body,base,head,link,meta,style,title,address,article,aside,footer,header,hgroup,h1,h2,h3,h4,h5,h6,nav,section,div,dd,dl,dt,figcaption,figure,picture,hr,img,li,main,ol,p,pre,ul,a,b,abbr,bdi,bdo,br,cite,code,data,dfn,em,i,kbd,mark,q,rp,rt,ruby,s,samp,small,span,strong,sub,sup,time,u,var,wbr,area,audio,map,track,video,embed,object,param,source,canvas,script,noscript,del,ins,caption,col,colgroup,table,thead,tbody,td,th,tr,button,datalist,fieldset,form,input,label,legend,meter,optgroup,option,output,progress,select,textarea,details,dialog,menu,summary,template,blockquote,iframe,tfoot"; + const SVG_TAGS = "svg,animate,animateMotion,animateTransform,circle,clipPath,color-profile,defs,desc,discard,ellipse,feBlend,feColorMatrix,feComponentTransfer,feComposite,feConvolveMatrix,feDiffuseLighting,feDisplacementMap,feDistantLight,feDropShadow,feFlood,feFuncA,feFuncB,feFuncG,feFuncR,feGaussianBlur,feImage,feMerge,feMergeNode,feMorphology,feOffset,fePointLight,feSpecularLighting,feSpotLight,feTile,feTurbulence,filter,foreignObject,g,hatch,hatchpath,image,line,linearGradient,marker,mask,mesh,meshgradient,meshpatch,meshrow,metadata,mpath,path,pattern,polygon,polyline,radialGradient,rect,set,solidcolor,stop,switch,symbol,text,textPath,title,tspan,unknown,use,view"; + const VOID_TAGS = "area,base,br,col,embed,hr,img,input,link,meta,param,source,track,wbr"; + const isHTMLTag = /* @__PURE__ */ makeMap(HTML_TAGS); + const isSVGTag = /* @__PURE__ */ makeMap(SVG_TAGS); + const isVoidTag = /* @__PURE__ */ makeMap(VOID_TAGS); + + const specialBooleanAttrs = `itemscope,allowfullscreen,formnovalidate,ismap,nomodule,novalidate,readonly`; + const isSpecialBooleanAttr = /* @__PURE__ */ makeMap(specialBooleanAttrs); + function includeBooleanAttr(value) { + return !!value || value === ""; + } + + function looseCompareArrays(a, b) { + if (a.length !== b.length) + return false; + let equal = true; + for (let i = 0; equal && i < a.length; i++) { + equal = looseEqual(a[i], b[i]); + } + return equal; + } + function looseEqual(a, b) { + if (a === b) + return true; + let aValidType = isDate(a); + let bValidType = isDate(b); + if (aValidType || bValidType) { + return aValidType && 
bValidType ? a.getTime() === b.getTime() : false; + } + aValidType = isSymbol(a); + bValidType = isSymbol(b); + if (aValidType || bValidType) { + return a === b; + } + aValidType = isArray(a); + bValidType = isArray(b); + if (aValidType || bValidType) { + return aValidType && bValidType ? looseCompareArrays(a, b) : false; + } + aValidType = isObject(a); + bValidType = isObject(b); + if (aValidType || bValidType) { + if (!aValidType || !bValidType) { + return false; + } + const aKeysCount = Object.keys(a).length; + const bKeysCount = Object.keys(b).length; + if (aKeysCount !== bKeysCount) { + return false; + } + for (const key in a) { + const aHasKey = a.hasOwnProperty(key); + const bHasKey = b.hasOwnProperty(key); + if (aHasKey && !bHasKey || !aHasKey && bHasKey || !looseEqual(a[key], b[key])) { + return false; + } + } + } + return String(a) === String(b); + } + function looseIndexOf(arr, val) { + return arr.findIndex((item) => looseEqual(item, val)); + } + + const toDisplayString = (val) => { + return isString(val) ? val : val == null ? "" : isArray(val) || isObject(val) && (val.toString === objectToString || !isFunction(val.toString)) ? JSON.stringify(val, replacer, 2) : String(val); + }; + const replacer = (_key, val) => { + if (val && val.__v_isRef) { + return replacer(_key, val.value); + } else if (isMap(val)) { + return { + [`Map(${val.size})`]: [...val.entries()].reduce((entries, [key, val2]) => { + entries[`${key} =>`] = val2; + return entries; + }, {}) + }; + } else if (isSet(val)) { + return { + [`Set(${val.size})`]: [...val.values()] + }; + } else if (isObject(val) && !isArray(val) && !isPlainObject(val)) { + return String(val); + } + return val; + }; + + function warn$1(msg, ...args) { + console.warn(`[Vue warn] ${msg}`, ...args); + } + + let activeEffectScope; + class EffectScope { + constructor(detached = false) { + this.detached = detached; + /** + * @internal + */ + this._active = true; + /** + * @internal + */ + this.effects = []; + /** + * @internal + */ + this.cleanups = []; + this.parent = activeEffectScope; + if (!detached && activeEffectScope) { + this.index = (activeEffectScope.scopes || (activeEffectScope.scopes = [])).push( + this + ) - 1; + } + } + get active() { + return this._active; + } + run(fn) { + if (this._active) { + const currentEffectScope = activeEffectScope; + try { + activeEffectScope = this; + return fn(); + } finally { + activeEffectScope = currentEffectScope; + } + } else { + warn$1(`cannot run an inactive effect scope.`); + } + } + /** + * This should only be called on non-detached scopes + * @internal + */ + on() { + activeEffectScope = this; + } + /** + * This should only be called on non-detached scopes + * @internal + */ + off() { + activeEffectScope = this.parent; + } + stop(fromParent) { + if (this._active) { + let i, l; + for (i = 0, l = this.effects.length; i < l; i++) { + this.effects[i].stop(); + } + for (i = 0, l = this.cleanups.length; i < l; i++) { + this.cleanups[i](); + } + if (this.scopes) { + for (i = 0, l = this.scopes.length; i < l; i++) { + this.scopes[i].stop(true); + } + } + if (!this.detached && this.parent && !fromParent) { + const last = this.parent.scopes.pop(); + if (last && last !== this) { + this.parent.scopes[this.index] = last; + last.index = this.index; + } + } + this.parent = void 0; + this._active = false; + } + } + } + function effectScope(detached) { + return new EffectScope(detached); + } + function recordEffectScope(effect, scope = activeEffectScope) { + if (scope && scope.active) { + 
scope.effects.push(effect); + } + } + function getCurrentScope() { + return activeEffectScope; + } + function onScopeDispose(fn) { + if (activeEffectScope) { + activeEffectScope.cleanups.push(fn); + } else { + warn$1( + `onScopeDispose() is called when there is no active effect scope to be associated with.` + ); + } + } + + const createDep = (effects) => { + const dep = new Set(effects); + dep.w = 0; + dep.n = 0; + return dep; + }; + const wasTracked = (dep) => (dep.w & trackOpBit) > 0; + const newTracked = (dep) => (dep.n & trackOpBit) > 0; + const initDepMarkers = ({ deps }) => { + if (deps.length) { + for (let i = 0; i < deps.length; i++) { + deps[i].w |= trackOpBit; + } + } + }; + const finalizeDepMarkers = (effect) => { + const { deps } = effect; + if (deps.length) { + let ptr = 0; + for (let i = 0; i < deps.length; i++) { + const dep = deps[i]; + if (wasTracked(dep) && !newTracked(dep)) { + dep.delete(effect); + } else { + deps[ptr++] = dep; + } + dep.w &= ~trackOpBit; + dep.n &= ~trackOpBit; + } + deps.length = ptr; + } + }; + + const targetMap = /* @__PURE__ */ new WeakMap(); + let effectTrackDepth = 0; + let trackOpBit = 1; + const maxMarkerBits = 30; + let activeEffect; + const ITERATE_KEY = Symbol("iterate" ); + const MAP_KEY_ITERATE_KEY = Symbol("Map key iterate" ); + class ReactiveEffect { + constructor(fn, scheduler = null, scope) { + this.fn = fn; + this.scheduler = scheduler; + this.active = true; + this.deps = []; + this.parent = void 0; + recordEffectScope(this, scope); + } + run() { + if (!this.active) { + return this.fn(); + } + let parent = activeEffect; + let lastShouldTrack = shouldTrack; + while (parent) { + if (parent === this) { + return; + } + parent = parent.parent; + } + try { + this.parent = activeEffect; + activeEffect = this; + shouldTrack = true; + trackOpBit = 1 << ++effectTrackDepth; + if (effectTrackDepth <= maxMarkerBits) { + initDepMarkers(this); + } else { + cleanupEffect(this); + } + return this.fn(); + } finally { + if (effectTrackDepth <= maxMarkerBits) { + finalizeDepMarkers(this); + } + trackOpBit = 1 << --effectTrackDepth; + activeEffect = this.parent; + shouldTrack = lastShouldTrack; + this.parent = void 0; + if (this.deferStop) { + this.stop(); + } + } + } + stop() { + if (activeEffect === this) { + this.deferStop = true; + } else if (this.active) { + cleanupEffect(this); + if (this.onStop) { + this.onStop(); + } + this.active = false; + } + } + } + function cleanupEffect(effect2) { + const { deps } = effect2; + if (deps.length) { + for (let i = 0; i < deps.length; i++) { + deps[i].delete(effect2); + } + deps.length = 0; + } + } + function effect(fn, options) { + if (fn.effect) { + fn = fn.effect.fn; + } + const _effect = new ReactiveEffect(fn); + if (options) { + extend(_effect, options); + if (options.scope) + recordEffectScope(_effect, options.scope); + } + if (!options || !options.lazy) { + _effect.run(); + } + const runner = _effect.run.bind(_effect); + runner.effect = _effect; + return runner; + } + function stop(runner) { + runner.effect.stop(); + } + let shouldTrack = true; + const trackStack = []; + function pauseTracking() { + trackStack.push(shouldTrack); + shouldTrack = false; + } + function resetTracking() { + const last = trackStack.pop(); + shouldTrack = last === void 0 ? 
true : last; + } + function track(target, type, key) { + if (shouldTrack && activeEffect) { + let depsMap = targetMap.get(target); + if (!depsMap) { + targetMap.set(target, depsMap = /* @__PURE__ */ new Map()); + } + let dep = depsMap.get(key); + if (!dep) { + depsMap.set(key, dep = createDep()); + } + const eventInfo = { effect: activeEffect, target, type, key } ; + trackEffects(dep, eventInfo); + } + } + function trackEffects(dep, debuggerEventExtraInfo) { + let shouldTrack2 = false; + if (effectTrackDepth <= maxMarkerBits) { + if (!newTracked(dep)) { + dep.n |= trackOpBit; + shouldTrack2 = !wasTracked(dep); + } + } else { + shouldTrack2 = !dep.has(activeEffect); + } + if (shouldTrack2) { + dep.add(activeEffect); + activeEffect.deps.push(dep); + if (activeEffect.onTrack) { + activeEffect.onTrack( + extend( + { + effect: activeEffect + }, + debuggerEventExtraInfo + ) + ); + } + } + } + function trigger(target, type, key, newValue, oldValue, oldTarget) { + const depsMap = targetMap.get(target); + if (!depsMap) { + return; + } + let deps = []; + if (type === "clear") { + deps = [...depsMap.values()]; + } else if (key === "length" && isArray(target)) { + const newLength = Number(newValue); + depsMap.forEach((dep, key2) => { + if (key2 === "length" || key2 >= newLength) { + deps.push(dep); + } + }); + } else { + if (key !== void 0) { + deps.push(depsMap.get(key)); + } + switch (type) { + case "add": + if (!isArray(target)) { + deps.push(depsMap.get(ITERATE_KEY)); + if (isMap(target)) { + deps.push(depsMap.get(MAP_KEY_ITERATE_KEY)); + } + } else if (isIntegerKey(key)) { + deps.push(depsMap.get("length")); + } + break; + case "delete": + if (!isArray(target)) { + deps.push(depsMap.get(ITERATE_KEY)); + if (isMap(target)) { + deps.push(depsMap.get(MAP_KEY_ITERATE_KEY)); + } + } + break; + case "set": + if (isMap(target)) { + deps.push(depsMap.get(ITERATE_KEY)); + } + break; + } + } + const eventInfo = { target, type, key, newValue, oldValue, oldTarget } ; + if (deps.length === 1) { + if (deps[0]) { + { + triggerEffects(deps[0], eventInfo); + } + } + } else { + const effects = []; + for (const dep of deps) { + if (dep) { + effects.push(...dep); + } + } + { + triggerEffects(createDep(effects), eventInfo); + } + } + } + function triggerEffects(dep, debuggerEventExtraInfo) { + const effects = isArray(dep) ? dep : [...dep]; + for (const effect2 of effects) { + if (effect2.computed) { + triggerEffect(effect2, debuggerEventExtraInfo); + } + } + for (const effect2 of effects) { + if (!effect2.computed) { + triggerEffect(effect2, debuggerEventExtraInfo); + } + } + } + function triggerEffect(effect2, debuggerEventExtraInfo) { + if (effect2 !== activeEffect || effect2.allowRecurse) { + if (effect2.onTrigger) { + effect2.onTrigger(extend({ effect: effect2 }, debuggerEventExtraInfo)); + } + if (effect2.scheduler) { + effect2.scheduler(); + } else { + effect2.run(); + } + } + } + function getDepFromReactive(object, key) { + var _a; + return (_a = targetMap.get(object)) == null ? 
void 0 : _a.get(key); + } + + const isNonTrackableKeys = /* @__PURE__ */ makeMap(`__proto__,__v_isRef,__isVue`); + const builtInSymbols = new Set( + /* @__PURE__ */ Object.getOwnPropertyNames(Symbol).filter((key) => key !== "arguments" && key !== "caller").map((key) => Symbol[key]).filter(isSymbol) + ); + const get$1 = /* @__PURE__ */ createGetter(); + const shallowGet = /* @__PURE__ */ createGetter(false, true); + const readonlyGet = /* @__PURE__ */ createGetter(true); + const shallowReadonlyGet = /* @__PURE__ */ createGetter(true, true); + const arrayInstrumentations = /* @__PURE__ */ createArrayInstrumentations(); + function createArrayInstrumentations() { + const instrumentations = {}; + ["includes", "indexOf", "lastIndexOf"].forEach((key) => { + instrumentations[key] = function(...args) { + const arr = toRaw(this); + for (let i = 0, l = this.length; i < l; i++) { + track(arr, "get", i + ""); + } + const res = arr[key](...args); + if (res === -1 || res === false) { + return arr[key](...args.map(toRaw)); + } else { + return res; + } + }; + }); + ["push", "pop", "shift", "unshift", "splice"].forEach((key) => { + instrumentations[key] = function(...args) { + pauseTracking(); + const res = toRaw(this)[key].apply(this, args); + resetTracking(); + return res; + }; + }); + return instrumentations; + } + function hasOwnProperty(key) { + const obj = toRaw(this); + track(obj, "has", key); + return obj.hasOwnProperty(key); + } + function createGetter(isReadonly2 = false, shallow = false) { + return function get2(target, key, receiver) { + if (key === "__v_isReactive") { + return !isReadonly2; + } else if (key === "__v_isReadonly") { + return isReadonly2; + } else if (key === "__v_isShallow") { + return shallow; + } else if (key === "__v_raw" && receiver === (isReadonly2 ? shallow ? shallowReadonlyMap : readonlyMap : shallow ? shallowReactiveMap : reactiveMap).get(target)) { + return target; + } + const targetIsArray = isArray(target); + if (!isReadonly2) { + if (targetIsArray && hasOwn(arrayInstrumentations, key)) { + return Reflect.get(arrayInstrumentations, key, receiver); + } + if (key === "hasOwnProperty") { + return hasOwnProperty; + } + } + const res = Reflect.get(target, key, receiver); + if (isSymbol(key) ? builtInSymbols.has(key) : isNonTrackableKeys(key)) { + return res; + } + if (!isReadonly2) { + track(target, "get", key); + } + if (shallow) { + return res; + } + if (isRef(res)) { + return targetIsArray && isIntegerKey(key) ? res : res.value; + } + if (isObject(res)) { + return isReadonly2 ? readonly(res) : reactive(res); + } + return res; + }; + } + const set$1 = /* @__PURE__ */ createSetter(); + const shallowSet = /* @__PURE__ */ createSetter(true); + function createSetter(shallow = false) { + return function set2(target, key, value, receiver) { + let oldValue = target[key]; + if (isReadonly(oldValue) && isRef(oldValue) && !isRef(value)) { + return false; + } + if (!shallow) { + if (!isShallow(value) && !isReadonly(value)) { + oldValue = toRaw(oldValue); + value = toRaw(value); + } + if (!isArray(target) && isRef(oldValue) && !isRef(value)) { + oldValue.value = value; + return true; + } + } + const hadKey = isArray(target) && isIntegerKey(key) ? 
Number(key) < target.length : hasOwn(target, key); + const result = Reflect.set(target, key, value, receiver); + if (target === toRaw(receiver)) { + if (!hadKey) { + trigger(target, "add", key, value); + } else if (hasChanged(value, oldValue)) { + trigger(target, "set", key, value, oldValue); + } + } + return result; + }; + } + function deleteProperty(target, key) { + const hadKey = hasOwn(target, key); + const oldValue = target[key]; + const result = Reflect.deleteProperty(target, key); + if (result && hadKey) { + trigger(target, "delete", key, void 0, oldValue); + } + return result; + } + function has$1(target, key) { + const result = Reflect.has(target, key); + if (!isSymbol(key) || !builtInSymbols.has(key)) { + track(target, "has", key); + } + return result; + } + function ownKeys(target) { + track(target, "iterate", isArray(target) ? "length" : ITERATE_KEY); + return Reflect.ownKeys(target); + } + const mutableHandlers = { + get: get$1, + set: set$1, + deleteProperty, + has: has$1, + ownKeys + }; + const readonlyHandlers = { + get: readonlyGet, + set(target, key) { + { + warn$1( + `Set operation on key "${String(key)}" failed: target is readonly.`, + target + ); + } + return true; + }, + deleteProperty(target, key) { + { + warn$1( + `Delete operation on key "${String(key)}" failed: target is readonly.`, + target + ); + } + return true; + } + }; + const shallowReactiveHandlers = /* @__PURE__ */ extend( + {}, + mutableHandlers, + { + get: shallowGet, + set: shallowSet + } + ); + const shallowReadonlyHandlers = /* @__PURE__ */ extend( + {}, + readonlyHandlers, + { + get: shallowReadonlyGet + } + ); + + const toShallow = (value) => value; + const getProto = (v) => Reflect.getPrototypeOf(v); + function get(target, key, isReadonly = false, isShallow = false) { + target = target["__v_raw"]; + const rawTarget = toRaw(target); + const rawKey = toRaw(key); + if (!isReadonly) { + if (key !== rawKey) { + track(rawTarget, "get", key); + } + track(rawTarget, "get", rawKey); + } + const { has: has2 } = getProto(rawTarget); + const wrap = isShallow ? toShallow : isReadonly ? toReadonly : toReactive; + if (has2.call(rawTarget, key)) { + return wrap(target.get(key)); + } else if (has2.call(rawTarget, rawKey)) { + return wrap(target.get(rawKey)); + } else if (target !== rawTarget) { + target.get(key); + } + } + function has(key, isReadonly = false) { + const target = this["__v_raw"]; + const rawTarget = toRaw(target); + const rawKey = toRaw(key); + if (!isReadonly) { + if (key !== rawKey) { + track(rawTarget, "has", key); + } + track(rawTarget, "has", rawKey); + } + return key === rawKey ? 
target.has(key) : target.has(key) || target.has(rawKey); + } + function size(target, isReadonly = false) { + target = target["__v_raw"]; + !isReadonly && track(toRaw(target), "iterate", ITERATE_KEY); + return Reflect.get(target, "size", target); + } + function add(value) { + value = toRaw(value); + const target = toRaw(this); + const proto = getProto(target); + const hadKey = proto.has.call(target, value); + if (!hadKey) { + target.add(value); + trigger(target, "add", value, value); + } + return this; + } + function set(key, value) { + value = toRaw(value); + const target = toRaw(this); + const { has: has2, get: get2 } = getProto(target); + let hadKey = has2.call(target, key); + if (!hadKey) { + key = toRaw(key); + hadKey = has2.call(target, key); + } else { + checkIdentityKeys(target, has2, key); + } + const oldValue = get2.call(target, key); + target.set(key, value); + if (!hadKey) { + trigger(target, "add", key, value); + } else if (hasChanged(value, oldValue)) { + trigger(target, "set", key, value, oldValue); + } + return this; + } + function deleteEntry(key) { + const target = toRaw(this); + const { has: has2, get: get2 } = getProto(target); + let hadKey = has2.call(target, key); + if (!hadKey) { + key = toRaw(key); + hadKey = has2.call(target, key); + } else { + checkIdentityKeys(target, has2, key); + } + const oldValue = get2 ? get2.call(target, key) : void 0; + const result = target.delete(key); + if (hadKey) { + trigger(target, "delete", key, void 0, oldValue); + } + return result; + } + function clear() { + const target = toRaw(this); + const hadItems = target.size !== 0; + const oldTarget = isMap(target) ? new Map(target) : new Set(target) ; + const result = target.clear(); + if (hadItems) { + trigger(target, "clear", void 0, void 0, oldTarget); + } + return result; + } + function createForEach(isReadonly, isShallow) { + return function forEach(callback, thisArg) { + const observed = this; + const target = observed["__v_raw"]; + const rawTarget = toRaw(target); + const wrap = isShallow ? toShallow : isReadonly ? toReadonly : toReactive; + !isReadonly && track(rawTarget, "iterate", ITERATE_KEY); + return target.forEach((value, key) => { + return callback.call(thisArg, wrap(value), wrap(key), observed); + }); + }; + } + function createIterableMethod(method, isReadonly, isShallow) { + return function(...args) { + const target = this["__v_raw"]; + const rawTarget = toRaw(target); + const targetIsMap = isMap(rawTarget); + const isPair = method === "entries" || method === Symbol.iterator && targetIsMap; + const isKeyOnly = method === "keys" && targetIsMap; + const innerIterator = target[method](...args); + const wrap = isShallow ? toShallow : isReadonly ? toReadonly : toReactive; + !isReadonly && track( + rawTarget, + "iterate", + isKeyOnly ? MAP_KEY_ITERATE_KEY : ITERATE_KEY + ); + return { + // iterator protocol + next() { + const { value, done } = innerIterator.next(); + return done ? { value, done } : { + value: isPair ? [wrap(value[0]), wrap(value[1])] : wrap(value), + done + }; + }, + // iterable protocol + [Symbol.iterator]() { + return this; + } + }; + }; + } + function createReadonlyMethod(type) { + return function(...args) { + { + const key = args[0] ? `on key "${args[0]}" ` : ``; + console.warn( + `${capitalize(type)} operation ${key}failed: target is readonly.`, + toRaw(this) + ); + } + return type === "delete" ? 
false : this; + }; + } + function createInstrumentations() { + const mutableInstrumentations2 = { + get(key) { + return get(this, key); + }, + get size() { + return size(this); + }, + has, + add, + set, + delete: deleteEntry, + clear, + forEach: createForEach(false, false) + }; + const shallowInstrumentations2 = { + get(key) { + return get(this, key, false, true); + }, + get size() { + return size(this); + }, + has, + add, + set, + delete: deleteEntry, + clear, + forEach: createForEach(false, true) + }; + const readonlyInstrumentations2 = { + get(key) { + return get(this, key, true); + }, + get size() { + return size(this, true); + }, + has(key) { + return has.call(this, key, true); + }, + add: createReadonlyMethod("add"), + set: createReadonlyMethod("set"), + delete: createReadonlyMethod("delete"), + clear: createReadonlyMethod("clear"), + forEach: createForEach(true, false) + }; + const shallowReadonlyInstrumentations2 = { + get(key) { + return get(this, key, true, true); + }, + get size() { + return size(this, true); + }, + has(key) { + return has.call(this, key, true); + }, + add: createReadonlyMethod("add"), + set: createReadonlyMethod("set"), + delete: createReadonlyMethod("delete"), + clear: createReadonlyMethod("clear"), + forEach: createForEach(true, true) + }; + const iteratorMethods = ["keys", "values", "entries", Symbol.iterator]; + iteratorMethods.forEach((method) => { + mutableInstrumentations2[method] = createIterableMethod( + method, + false, + false + ); + readonlyInstrumentations2[method] = createIterableMethod( + method, + true, + false + ); + shallowInstrumentations2[method] = createIterableMethod( + method, + false, + true + ); + shallowReadonlyInstrumentations2[method] = createIterableMethod( + method, + true, + true + ); + }); + return [ + mutableInstrumentations2, + readonlyInstrumentations2, + shallowInstrumentations2, + shallowReadonlyInstrumentations2 + ]; + } + const [ + mutableInstrumentations, + readonlyInstrumentations, + shallowInstrumentations, + shallowReadonlyInstrumentations + ] = /* @__PURE__ */ createInstrumentations(); + function createInstrumentationGetter(isReadonly, shallow) { + const instrumentations = shallow ? isReadonly ? shallowReadonlyInstrumentations : shallowInstrumentations : isReadonly ? readonlyInstrumentations : mutableInstrumentations; + return (target, key, receiver) => { + if (key === "__v_isReactive") { + return !isReadonly; + } else if (key === "__v_isReadonly") { + return isReadonly; + } else if (key === "__v_raw") { + return target; + } + return Reflect.get( + hasOwn(instrumentations, key) && key in target ? instrumentations : target, + key, + receiver + ); + }; + } + const mutableCollectionHandlers = { + get: /* @__PURE__ */ createInstrumentationGetter(false, false) + }; + const shallowCollectionHandlers = { + get: /* @__PURE__ */ createInstrumentationGetter(false, true) + }; + const readonlyCollectionHandlers = { + get: /* @__PURE__ */ createInstrumentationGetter(true, false) + }; + const shallowReadonlyCollectionHandlers = { + get: /* @__PURE__ */ createInstrumentationGetter(true, true) + }; + function checkIdentityKeys(target, has2, key) { + const rawKey = toRaw(key); + if (rawKey !== key && has2.call(target, rawKey)) { + const type = toRawType(target); + console.warn( + `Reactive ${type} contains both the raw and reactive versions of the same object${type === `Map` ? ` as keys` : ``}, which can lead to inconsistencies. 
Avoid differentiating between the raw and reactive versions of an object and only use the reactive version if possible.` + ); + } + } + + const reactiveMap = /* @__PURE__ */ new WeakMap(); + const shallowReactiveMap = /* @__PURE__ */ new WeakMap(); + const readonlyMap = /* @__PURE__ */ new WeakMap(); + const shallowReadonlyMap = /* @__PURE__ */ new WeakMap(); + function targetTypeMap(rawType) { + switch (rawType) { + case "Object": + case "Array": + return 1 /* COMMON */; + case "Map": + case "Set": + case "WeakMap": + case "WeakSet": + return 2 /* COLLECTION */; + default: + return 0 /* INVALID */; + } + } + function getTargetType(value) { + return value["__v_skip"] || !Object.isExtensible(value) ? 0 /* INVALID */ : targetTypeMap(toRawType(value)); + } + function reactive(target) { + if (isReadonly(target)) { + return target; + } + return createReactiveObject( + target, + false, + mutableHandlers, + mutableCollectionHandlers, + reactiveMap + ); + } + function shallowReactive(target) { + return createReactiveObject( + target, + false, + shallowReactiveHandlers, + shallowCollectionHandlers, + shallowReactiveMap + ); + } + function readonly(target) { + return createReactiveObject( + target, + true, + readonlyHandlers, + readonlyCollectionHandlers, + readonlyMap + ); + } + function shallowReadonly(target) { + return createReactiveObject( + target, + true, + shallowReadonlyHandlers, + shallowReadonlyCollectionHandlers, + shallowReadonlyMap + ); + } + function createReactiveObject(target, isReadonly2, baseHandlers, collectionHandlers, proxyMap) { + if (!isObject(target)) { + { + console.warn(`value cannot be made reactive: ${String(target)}`); + } + return target; + } + if (target["__v_raw"] && !(isReadonly2 && target["__v_isReactive"])) { + return target; + } + const existingProxy = proxyMap.get(target); + if (existingProxy) { + return existingProxy; + } + const targetType = getTargetType(target); + if (targetType === 0 /* INVALID */) { + return target; + } + const proxy = new Proxy( + target, + targetType === 2 /* COLLECTION */ ? collectionHandlers : baseHandlers + ); + proxyMap.set(target, proxy); + return proxy; + } + function isReactive(value) { + if (isReadonly(value)) { + return isReactive(value["__v_raw"]); + } + return !!(value && value["__v_isReactive"]); + } + function isReadonly(value) { + return !!(value && value["__v_isReadonly"]); + } + function isShallow(value) { + return !!(value && value["__v_isShallow"]); + } + function isProxy(value) { + return isReactive(value) || isReadonly(value); + } + function toRaw(observed) { + const raw = observed && observed["__v_raw"]; + return raw ? toRaw(raw) : observed; + } + function markRaw(value) { + def(value, "__v_skip", true); + return value; + } + const toReactive = (value) => isObject(value) ? reactive(value) : value; + const toReadonly = (value) => isObject(value) ? 
readonly(value) : value; + + function trackRefValue(ref2) { + if (shouldTrack && activeEffect) { + ref2 = toRaw(ref2); + { + trackEffects(ref2.dep || (ref2.dep = createDep()), { + target: ref2, + type: "get", + key: "value" + }); + } + } + } + function triggerRefValue(ref2, newVal) { + ref2 = toRaw(ref2); + const dep = ref2.dep; + if (dep) { + { + triggerEffects(dep, { + target: ref2, + type: "set", + key: "value", + newValue: newVal + }); + } + } + } + function isRef(r) { + return !!(r && r.__v_isRef === true); + } + function ref(value) { + return createRef(value, false); + } + function shallowRef(value) { + return createRef(value, true); + } + function createRef(rawValue, shallow) { + if (isRef(rawValue)) { + return rawValue; + } + return new RefImpl(rawValue, shallow); + } + class RefImpl { + constructor(value, __v_isShallow) { + this.__v_isShallow = __v_isShallow; + this.dep = void 0; + this.__v_isRef = true; + this._rawValue = __v_isShallow ? value : toRaw(value); + this._value = __v_isShallow ? value : toReactive(value); + } + get value() { + trackRefValue(this); + return this._value; + } + set value(newVal) { + const useDirectValue = this.__v_isShallow || isShallow(newVal) || isReadonly(newVal); + newVal = useDirectValue ? newVal : toRaw(newVal); + if (hasChanged(newVal, this._rawValue)) { + this._rawValue = newVal; + this._value = useDirectValue ? newVal : toReactive(newVal); + triggerRefValue(this, newVal); + } + } + } + function triggerRef(ref2) { + triggerRefValue(ref2, ref2.value ); + } + function unref(ref2) { + return isRef(ref2) ? ref2.value : ref2; + } + function toValue(source) { + return isFunction(source) ? source() : unref(source); + } + const shallowUnwrapHandlers = { + get: (target, key, receiver) => unref(Reflect.get(target, key, receiver)), + set: (target, key, value, receiver) => { + const oldValue = target[key]; + if (isRef(oldValue) && !isRef(value)) { + oldValue.value = value; + return true; + } else { + return Reflect.set(target, key, value, receiver); + } + } + }; + function proxyRefs(objectWithRefs) { + return isReactive(objectWithRefs) ? objectWithRefs : new Proxy(objectWithRefs, shallowUnwrapHandlers); + } + class CustomRefImpl { + constructor(factory) { + this.dep = void 0; + this.__v_isRef = true; + const { get, set } = factory( + () => trackRefValue(this), + () => triggerRefValue(this) + ); + this._get = get; + this._set = set; + } + get value() { + return this._get(); + } + set value(newVal) { + this._set(newVal); + } + } + function customRef(factory) { + return new CustomRefImpl(factory); + } + function toRefs(object) { + if (!isProxy(object)) { + console.warn(`toRefs() expects a reactive object but received a plain one.`); + } + const ret = isArray(object) ? new Array(object.length) : {}; + for (const key in object) { + ret[key] = propertyToRef(object, key); + } + return ret; + } + class ObjectRefImpl { + constructor(_object, _key, _defaultValue) { + this._object = _object; + this._key = _key; + this._defaultValue = _defaultValue; + this.__v_isRef = true; + } + get value() { + const val = this._object[this._key]; + return val === void 0 ? 
this._defaultValue : val; + } + set value(newVal) { + this._object[this._key] = newVal; + } + get dep() { + return getDepFromReactive(toRaw(this._object), this._key); + } + } + class GetterRefImpl { + constructor(_getter) { + this._getter = _getter; + this.__v_isRef = true; + this.__v_isReadonly = true; + } + get value() { + return this._getter(); + } + } + function toRef(source, key, defaultValue) { + if (isRef(source)) { + return source; + } else if (isFunction(source)) { + return new GetterRefImpl(source); + } else if (isObject(source) && arguments.length > 1) { + return propertyToRef(source, key, defaultValue); + } else { + return ref(source); + } + } + function propertyToRef(source, key, defaultValue) { + const val = source[key]; + return isRef(val) ? val : new ObjectRefImpl( + source, + key, + defaultValue + ); + } + + class ComputedRefImpl { + constructor(getter, _setter, isReadonly, isSSR) { + this._setter = _setter; + this.dep = void 0; + this.__v_isRef = true; + this["__v_isReadonly"] = false; + this._dirty = true; + this.effect = new ReactiveEffect(getter, () => { + if (!this._dirty) { + this._dirty = true; + triggerRefValue(this); + } + }); + this.effect.computed = this; + this.effect.active = this._cacheable = !isSSR; + this["__v_isReadonly"] = isReadonly; + } + get value() { + const self = toRaw(this); + trackRefValue(self); + if (self._dirty || !self._cacheable) { + self._dirty = false; + self._value = self.effect.run(); + } + return self._value; + } + set value(newValue) { + this._setter(newValue); + } + } + function computed$1(getterOrOptions, debugOptions, isSSR = false) { + let getter; + let setter; + const onlyGetter = isFunction(getterOrOptions); + if (onlyGetter) { + getter = getterOrOptions; + setter = () => { + console.warn("Write operation failed: computed value is readonly"); + } ; + } else { + getter = getterOrOptions.get; + setter = getterOrOptions.set; + } + const cRef = new ComputedRefImpl(getter, setter, onlyGetter || !setter, isSSR); + if (debugOptions && !isSSR) { + cRef.effect.onTrack = debugOptions.onTrack; + cRef.effect.onTrigger = debugOptions.onTrigger; + } + return cRef; + } + + const stack = []; + function pushWarningContext(vnode) { + stack.push(vnode); + } + function popWarningContext() { + stack.pop(); + } + function warn(msg, ...args) { + pauseTracking(); + const instance = stack.length ? 
stack[stack.length - 1].component : null; + const appWarnHandler = instance && instance.appContext.config.warnHandler; + const trace = getComponentTrace(); + if (appWarnHandler) { + callWithErrorHandling( + appWarnHandler, + instance, + 11, + [ + msg + args.join(""), + instance && instance.proxy, + trace.map( + ({ vnode }) => `at <${formatComponentName(instance, vnode.type)}>` + ).join("\n"), + trace + ] + ); + } else { + const warnArgs = [`[Vue warn]: ${msg}`, ...args]; + if (trace.length && // avoid spamming console during tests + true) { + warnArgs.push(` +`, ...formatTrace(trace)); + } + console.warn(...warnArgs); + } + resetTracking(); + } + function getComponentTrace() { + let currentVNode = stack[stack.length - 1]; + if (!currentVNode) { + return []; + } + const normalizedStack = []; + while (currentVNode) { + const last = normalizedStack[0]; + if (last && last.vnode === currentVNode) { + last.recurseCount++; + } else { + normalizedStack.push({ + vnode: currentVNode, + recurseCount: 0 + }); + } + const parentInstance = currentVNode.component && currentVNode.component.parent; + currentVNode = parentInstance && parentInstance.vnode; + } + return normalizedStack; + } + function formatTrace(trace) { + const logs = []; + trace.forEach((entry, i) => { + logs.push(...i === 0 ? [] : [` +`], ...formatTraceEntry(entry)); + }); + return logs; + } + function formatTraceEntry({ vnode, recurseCount }) { + const postfix = recurseCount > 0 ? `... (${recurseCount} recursive calls)` : ``; + const isRoot = vnode.component ? vnode.component.parent == null : false; + const open = ` at <${formatComponentName( + vnode.component, + vnode.type, + isRoot + )}`; + const close = `>` + postfix; + return vnode.props ? [open, ...formatProps(vnode.props), close] : [open + close]; + } + function formatProps(props) { + const res = []; + const keys = Object.keys(props); + keys.slice(0, 3).forEach((key) => { + res.push(...formatProp(key, props[key])); + }); + if (keys.length > 3) { + res.push(` ...`); + } + return res; + } + function formatProp(key, value, raw) { + if (isString(value)) { + value = JSON.stringify(value); + return raw ? value : [`${key}=${value}`]; + } else if (typeof value === "number" || typeof value === "boolean" || value == null) { + return raw ? value : [`${key}=${value}`]; + } else if (isRef(value)) { + value = formatProp(key, toRaw(value.value), true); + return raw ? value : [`${key}=Ref<`, value, `>`]; + } else if (isFunction(value)) { + return [`${key}=fn${value.name ? `<${value.name}>` : ``}`]; + } else { + value = toRaw(value); + return raw ? 
value : [`${key}=`, value]; + } + } + function assertNumber(val, type) { + if (val === void 0) { + return; + } else if (typeof val !== "number") { + warn(`${type} is not a valid number - got ${JSON.stringify(val)}.`); + } else if (isNaN(val)) { + warn(`${type} is NaN - the duration expression might be incorrect.`); + } + } + + const ErrorTypeStrings = { + ["sp"]: "serverPrefetch hook", + ["bc"]: "beforeCreate hook", + ["c"]: "created hook", + ["bm"]: "beforeMount hook", + ["m"]: "mounted hook", + ["bu"]: "beforeUpdate hook", + ["u"]: "updated", + ["bum"]: "beforeUnmount hook", + ["um"]: "unmounted hook", + ["a"]: "activated hook", + ["da"]: "deactivated hook", + ["ec"]: "errorCaptured hook", + ["rtc"]: "renderTracked hook", + ["rtg"]: "renderTriggered hook", + [0]: "setup function", + [1]: "render function", + [2]: "watcher getter", + [3]: "watcher callback", + [4]: "watcher cleanup function", + [5]: "native event handler", + [6]: "component event handler", + [7]: "vnode hook", + [8]: "directive hook", + [9]: "transition hook", + [10]: "app errorHandler", + [11]: "app warnHandler", + [12]: "ref function", + [13]: "async component loader", + [14]: "scheduler flush. This is likely a Vue internals bug. Please open an issue at https://new-issue.vuejs.org/?repo=vuejs/core" + }; + function callWithErrorHandling(fn, instance, type, args) { + let res; + try { + res = args ? fn(...args) : fn(); + } catch (err) { + handleError(err, instance, type); + } + return res; + } + function callWithAsyncErrorHandling(fn, instance, type, args) { + if (isFunction(fn)) { + const res = callWithErrorHandling(fn, instance, type, args); + if (res && isPromise(res)) { + res.catch((err) => { + handleError(err, instance, type); + }); + } + return res; + } + const values = []; + for (let i = 0; i < fn.length; i++) { + values.push(callWithAsyncErrorHandling(fn[i], instance, type, args)); + } + return values; + } + function handleError(err, instance, type, throwInDev = true) { + const contextVNode = instance ? instance.vnode : null; + if (instance) { + let cur = instance.parent; + const exposedInstance = instance.proxy; + const errorInfo = ErrorTypeStrings[type] ; + while (cur) { + const errorCapturedHooks = cur.ec; + if (errorCapturedHooks) { + for (let i = 0; i < errorCapturedHooks.length; i++) { + if (errorCapturedHooks[i](err, exposedInstance, errorInfo) === false) { + return; + } + } + } + cur = cur.parent; + } + const appErrorHandler = instance.appContext.config.errorHandler; + if (appErrorHandler) { + callWithErrorHandling( + appErrorHandler, + null, + 10, + [err, exposedInstance, errorInfo] + ); + return; + } + } + logError(err, type, contextVNode, throwInDev); + } + function logError(err, type, contextVNode, throwInDev = true) { + { + const info = ErrorTypeStrings[type]; + if (contextVNode) { + pushWarningContext(contextVNode); + } + warn(`Unhandled error${info ? ` during execution of ${info}` : ``}`); + if (contextVNode) { + popWarningContext(); + } + if (throwInDev) { + throw err; + } else { + console.error(err); + } + } + } + + let isFlushing = false; + let isFlushPending = false; + const queue = []; + let flushIndex = 0; + const pendingPostFlushCbs = []; + let activePostFlushCbs = null; + let postFlushIndex = 0; + const resolvedPromise = /* @__PURE__ */ Promise.resolve(); + let currentFlushPromise = null; + const RECURSION_LIMIT = 100; + function nextTick(fn) { + const p = currentFlushPromise || resolvedPromise; + return fn ? p.then(this ? 
fn.bind(this) : fn) : p; + } + function findInsertionIndex(id) { + let start = flushIndex + 1; + let end = queue.length; + while (start < end) { + const middle = start + end >>> 1; + const middleJobId = getId(queue[middle]); + middleJobId < id ? start = middle + 1 : end = middle; + } + return start; + } + function queueJob(job) { + if (!queue.length || !queue.includes( + job, + isFlushing && job.allowRecurse ? flushIndex + 1 : flushIndex + )) { + if (job.id == null) { + queue.push(job); + } else { + queue.splice(findInsertionIndex(job.id), 0, job); + } + queueFlush(); + } + } + function queueFlush() { + if (!isFlushing && !isFlushPending) { + isFlushPending = true; + currentFlushPromise = resolvedPromise.then(flushJobs); + } + } + function invalidateJob(job) { + const i = queue.indexOf(job); + if (i > flushIndex) { + queue.splice(i, 1); + } + } + function queuePostFlushCb(cb) { + if (!isArray(cb)) { + if (!activePostFlushCbs || !activePostFlushCbs.includes( + cb, + cb.allowRecurse ? postFlushIndex + 1 : postFlushIndex + )) { + pendingPostFlushCbs.push(cb); + } + } else { + pendingPostFlushCbs.push(...cb); + } + queueFlush(); + } + function flushPreFlushCbs(seen, i = isFlushing ? flushIndex + 1 : 0) { + { + seen = seen || /* @__PURE__ */ new Map(); + } + for (; i < queue.length; i++) { + const cb = queue[i]; + if (cb && cb.pre) { + if (checkRecursiveUpdates(seen, cb)) { + continue; + } + queue.splice(i, 1); + i--; + cb(); + } + } + } + function flushPostFlushCbs(seen) { + if (pendingPostFlushCbs.length) { + const deduped = [...new Set(pendingPostFlushCbs)]; + pendingPostFlushCbs.length = 0; + if (activePostFlushCbs) { + activePostFlushCbs.push(...deduped); + return; + } + activePostFlushCbs = deduped; + { + seen = seen || /* @__PURE__ */ new Map(); + } + activePostFlushCbs.sort((a, b) => getId(a) - getId(b)); + for (postFlushIndex = 0; postFlushIndex < activePostFlushCbs.length; postFlushIndex++) { + if (checkRecursiveUpdates(seen, activePostFlushCbs[postFlushIndex])) { + continue; + } + activePostFlushCbs[postFlushIndex](); + } + activePostFlushCbs = null; + postFlushIndex = 0; + } + } + const getId = (job) => job.id == null ? Infinity : job.id; + const comparator = (a, b) => { + const diff = getId(a) - getId(b); + if (diff === 0) { + if (a.pre && !b.pre) + return -1; + if (b.pre && !a.pre) + return 1; + } + return diff; + }; + function flushJobs(seen) { + isFlushPending = false; + isFlushing = true; + { + seen = seen || /* @__PURE__ */ new Map(); + } + queue.sort(comparator); + const check = (job) => checkRecursiveUpdates(seen, job) ; + try { + for (flushIndex = 0; flushIndex < queue.length; flushIndex++) { + const job = queue[flushIndex]; + if (job && job.active !== false) { + if (check(job)) { + continue; + } + callWithErrorHandling(job, null, 14); + } + } + } finally { + flushIndex = 0; + queue.length = 0; + flushPostFlushCbs(seen); + isFlushing = false; + currentFlushPromise = null; + if (queue.length || pendingPostFlushCbs.length) { + flushJobs(seen); + } + } + } + function checkRecursiveUpdates(seen, fn) { + if (!seen.has(fn)) { + seen.set(fn, 1); + } else { + const count = seen.get(fn); + if (count > RECURSION_LIMIT) { + const instance = fn.ownerInstance; + const componentName = instance && getComponentName(instance.type); + warn( + `Maximum recursive updates exceeded${componentName ? ` in component <${componentName}>` : ``}. This means you have a reactive effect that is mutating its own dependencies and thus recursively triggering itself. 
Possible sources include component template, render function, updated hook or watcher source function.` + ); + return true; + } else { + seen.set(fn, count + 1); + } + } + } + + let isHmrUpdating = false; + const hmrDirtyComponents = /* @__PURE__ */ new Set(); + { + getGlobalThis().__VUE_HMR_RUNTIME__ = { + createRecord: tryWrap(createRecord), + rerender: tryWrap(rerender), + reload: tryWrap(reload) + }; + } + const map = /* @__PURE__ */ new Map(); + function registerHMR(instance) { + const id = instance.type.__hmrId; + let record = map.get(id); + if (!record) { + createRecord(id, instance.type); + record = map.get(id); + } + record.instances.add(instance); + } + function unregisterHMR(instance) { + map.get(instance.type.__hmrId).instances.delete(instance); + } + function createRecord(id, initialDef) { + if (map.has(id)) { + return false; + } + map.set(id, { + initialDef: normalizeClassComponent(initialDef), + instances: /* @__PURE__ */ new Set() + }); + return true; + } + function normalizeClassComponent(component) { + return isClassComponent(component) ? component.__vccOpts : component; + } + function rerender(id, newRender) { + const record = map.get(id); + if (!record) { + return; + } + record.initialDef.render = newRender; + [...record.instances].forEach((instance) => { + if (newRender) { + instance.render = newRender; + normalizeClassComponent(instance.type).render = newRender; + } + instance.renderCache = []; + isHmrUpdating = true; + instance.update(); + isHmrUpdating = false; + }); + } + function reload(id, newComp) { + const record = map.get(id); + if (!record) + return; + newComp = normalizeClassComponent(newComp); + updateComponentDef(record.initialDef, newComp); + const instances = [...record.instances]; + for (const instance of instances) { + const oldComp = normalizeClassComponent(instance.type); + if (!hmrDirtyComponents.has(oldComp)) { + if (oldComp !== record.initialDef) { + updateComponentDef(oldComp, newComp); + } + hmrDirtyComponents.add(oldComp); + } + instance.appContext.propsCache.delete(instance.type); + instance.appContext.emitsCache.delete(instance.type); + instance.appContext.optionsCache.delete(instance.type); + if (instance.ceReload) { + hmrDirtyComponents.add(oldComp); + instance.ceReload(newComp.styles); + hmrDirtyComponents.delete(oldComp); + } else if (instance.parent) { + queueJob(instance.parent.update); + } else if (instance.appContext.reload) { + instance.appContext.reload(); + } else if (typeof window !== "undefined") { + window.location.reload(); + } else { + console.warn( + "[HMR] Root or manually mounted instance modified. Full reload required." + ); + } + } + queuePostFlushCb(() => { + for (const instance of instances) { + hmrDirtyComponents.delete( + normalizeClassComponent(instance.type) + ); + } + }); + } + function updateComponentDef(oldComp, newComp) { + extend(oldComp, newComp); + for (const key in oldComp) { + if (key !== "__file" && !(key in newComp)) { + delete oldComp[key]; + } + } + } + function tryWrap(fn) { + return (id, arg) => { + try { + return fn(id, arg); + } catch (e) { + console.error(e); + console.warn( + `[HMR] Something went wrong during Vue component hot-reload. 
Full reload required.` + ); + } + }; + } + + exports.devtools = void 0; + let buffer = []; + let devtoolsNotInstalled = false; + function emit$1(event, ...args) { + if (exports.devtools) { + exports.devtools.emit(event, ...args); + } else if (!devtoolsNotInstalled) { + buffer.push({ event, args }); + } + } + function setDevtoolsHook(hook, target) { + var _a, _b; + exports.devtools = hook; + if (exports.devtools) { + exports.devtools.enabled = true; + buffer.forEach(({ event, args }) => exports.devtools.emit(event, ...args)); + buffer = []; + } else if ( + // handle late devtools injection - only do this if we are in an actual + // browser environment to avoid the timer handle stalling test runner exit + // (#4815) + typeof window !== "undefined" && // some envs mock window but not fully + window.HTMLElement && // also exclude jsdom + !((_b = (_a = window.navigator) == null ? void 0 : _a.userAgent) == null ? void 0 : _b.includes("jsdom")) + ) { + const replay = target.__VUE_DEVTOOLS_HOOK_REPLAY__ = target.__VUE_DEVTOOLS_HOOK_REPLAY__ || []; + replay.push((newHook) => { + setDevtoolsHook(newHook, target); + }); + setTimeout(() => { + if (!exports.devtools) { + target.__VUE_DEVTOOLS_HOOK_REPLAY__ = null; + devtoolsNotInstalled = true; + buffer = []; + } + }, 3e3); + } else { + devtoolsNotInstalled = true; + buffer = []; + } + } + function devtoolsInitApp(app, version) { + emit$1("app:init" /* APP_INIT */, app, version, { + Fragment, + Text, + Comment, + Static + }); + } + function devtoolsUnmountApp(app) { + emit$1("app:unmount" /* APP_UNMOUNT */, app); + } + const devtoolsComponentAdded = /* @__PURE__ */ createDevtoolsComponentHook( + "component:added" /* COMPONENT_ADDED */ + ); + const devtoolsComponentUpdated = /* @__PURE__ */ createDevtoolsComponentHook("component:updated" /* COMPONENT_UPDATED */); + const _devtoolsComponentRemoved = /* @__PURE__ */ createDevtoolsComponentHook( + "component:removed" /* COMPONENT_REMOVED */ + ); + const devtoolsComponentRemoved = (component) => { + if (exports.devtools && typeof exports.devtools.cleanupBuffer === "function" && // remove the component if it wasn't buffered + !exports.devtools.cleanupBuffer(component)) { + _devtoolsComponentRemoved(component); + } + }; + function createDevtoolsComponentHook(hook) { + return (component) => { + emit$1( + hook, + component.appContext.app, + component.uid, + component.parent ? 
component.parent.uid : void 0, + component + ); + }; + } + const devtoolsPerfStart = /* @__PURE__ */ createDevtoolsPerformanceHook( + "perf:start" /* PERFORMANCE_START */ + ); + const devtoolsPerfEnd = /* @__PURE__ */ createDevtoolsPerformanceHook( + "perf:end" /* PERFORMANCE_END */ + ); + function createDevtoolsPerformanceHook(hook) { + return (component, type, time) => { + emit$1(hook, component.appContext.app, component.uid, component, type, time); + }; + } + function devtoolsComponentEmit(component, event, params) { + emit$1( + "component:emit" /* COMPONENT_EMIT */, + component.appContext.app, + component, + event, + params + ); + } + + function emit(instance, event, ...rawArgs) { + if (instance.isUnmounted) + return; + const props = instance.vnode.props || EMPTY_OBJ; + { + const { + emitsOptions, + propsOptions: [propsOptions] + } = instance; + if (emitsOptions) { + if (!(event in emitsOptions) && true) { + if (!propsOptions || !(toHandlerKey(event) in propsOptions)) { + warn( + `Component emitted event "${event}" but it is neither declared in the emits option nor as an "${toHandlerKey(event)}" prop.` + ); + } + } else { + const validator = emitsOptions[event]; + if (isFunction(validator)) { + const isValid = validator(...rawArgs); + if (!isValid) { + warn( + `Invalid event arguments: event validation failed for event "${event}".` + ); + } + } + } + } + } + let args = rawArgs; + const isModelListener = event.startsWith("update:"); + const modelArg = isModelListener && event.slice(7); + if (modelArg && modelArg in props) { + const modifiersKey = `${modelArg === "modelValue" ? "model" : modelArg}Modifiers`; + const { number, trim } = props[modifiersKey] || EMPTY_OBJ; + if (trim) { + args = rawArgs.map((a) => isString(a) ? a.trim() : a); + } + if (number) { + args = rawArgs.map(looseToNumber); + } + } + { + devtoolsComponentEmit(instance, event, args); + } + { + const lowerCaseEvent = event.toLowerCase(); + if (lowerCaseEvent !== event && props[toHandlerKey(lowerCaseEvent)]) { + warn( + `Event "${lowerCaseEvent}" is emitted in component ${formatComponentName( + instance, + instance.type + )} but the handler is registered for "${event}". Note that HTML attributes are case-insensitive and you cannot use v-on to listen to camelCase events when using in-DOM templates. 
You should probably use "${hyphenate(event)}" instead of "${event}".` + ); + } + } + let handlerName; + let handler = props[handlerName = toHandlerKey(event)] || // also try camelCase event handler (#2249) + props[handlerName = toHandlerKey(camelize(event))]; + if (!handler && isModelListener) { + handler = props[handlerName = toHandlerKey(hyphenate(event))]; + } + if (handler) { + callWithAsyncErrorHandling( + handler, + instance, + 6, + args + ); + } + const onceHandler = props[handlerName + `Once`]; + if (onceHandler) { + if (!instance.emitted) { + instance.emitted = {}; + } else if (instance.emitted[handlerName]) { + return; + } + instance.emitted[handlerName] = true; + callWithAsyncErrorHandling( + onceHandler, + instance, + 6, + args + ); + } + } + function normalizeEmitsOptions(comp, appContext, asMixin = false) { + const cache = appContext.emitsCache; + const cached = cache.get(comp); + if (cached !== void 0) { + return cached; + } + const raw = comp.emits; + let normalized = {}; + let hasExtends = false; + if (!isFunction(comp)) { + const extendEmits = (raw2) => { + const normalizedFromExtend = normalizeEmitsOptions(raw2, appContext, true); + if (normalizedFromExtend) { + hasExtends = true; + extend(normalized, normalizedFromExtend); + } + }; + if (!asMixin && appContext.mixins.length) { + appContext.mixins.forEach(extendEmits); + } + if (comp.extends) { + extendEmits(comp.extends); + } + if (comp.mixins) { + comp.mixins.forEach(extendEmits); + } + } + if (!raw && !hasExtends) { + if (isObject(comp)) { + cache.set(comp, null); + } + return null; + } + if (isArray(raw)) { + raw.forEach((key) => normalized[key] = null); + } else { + extend(normalized, raw); + } + if (isObject(comp)) { + cache.set(comp, normalized); + } + return normalized; + } + function isEmitListener(options, key) { + if (!options || !isOn(key)) { + return false; + } + key = key.slice(2).replace(/Once$/, ""); + return hasOwn(options, key[0].toLowerCase() + key.slice(1)) || hasOwn(options, hyphenate(key)) || hasOwn(options, key); + } + + let currentRenderingInstance = null; + let currentScopeId = null; + function setCurrentRenderingInstance(instance) { + const prev = currentRenderingInstance; + currentRenderingInstance = instance; + currentScopeId = instance && instance.type.__scopeId || null; + return prev; + } + function pushScopeId(id) { + currentScopeId = id; + } + function popScopeId() { + currentScopeId = null; + } + const withScopeId = (_id) => withCtx; + function withCtx(fn, ctx = currentRenderingInstance, isNonScopedSlot) { + if (!ctx) + return fn; + if (fn._n) { + return fn; + } + const renderFnWithContext = (...args) => { + if (renderFnWithContext._d) { + setBlockTracking(-1); + } + const prevInstance = setCurrentRenderingInstance(ctx); + let res; + try { + res = fn(...args); + } finally { + setCurrentRenderingInstance(prevInstance); + if (renderFnWithContext._d) { + setBlockTracking(1); + } + } + { + devtoolsComponentUpdated(ctx); + } + return res; + }; + renderFnWithContext._n = true; + renderFnWithContext._c = true; + renderFnWithContext._d = true; + return renderFnWithContext; + } + + let accessedAttrs = false; + function markAttrsAccessed() { + accessedAttrs = true; + } + function renderComponentRoot(instance) { + const { + type: Component, + vnode, + proxy, + withProxy, + props, + propsOptions: [propsOptions], + slots, + attrs, + emit, + render, + renderCache, + data, + setupState, + ctx, + inheritAttrs + } = instance; + let result; + let fallthroughAttrs; + const prev = 
setCurrentRenderingInstance(instance); + { + accessedAttrs = false; + } + try { + if (vnode.shapeFlag & 4) { + const proxyToUse = withProxy || proxy; + result = normalizeVNode( + render.call( + proxyToUse, + proxyToUse, + renderCache, + props, + setupState, + data, + ctx + ) + ); + fallthroughAttrs = attrs; + } else { + const render2 = Component; + if (attrs === props) { + markAttrsAccessed(); + } + result = normalizeVNode( + render2.length > 1 ? render2( + props, + true ? { + get attrs() { + markAttrsAccessed(); + return attrs; + }, + slots, + emit + } : { attrs, slots, emit } + ) : render2( + props, + null + /* we know it doesn't need it */ + ) + ); + fallthroughAttrs = Component.props ? attrs : getFunctionalFallthrough(attrs); + } + } catch (err) { + blockStack.length = 0; + handleError(err, instance, 1); + result = createVNode(Comment); + } + let root = result; + let setRoot = void 0; + if (result.patchFlag > 0 && result.patchFlag & 2048) { + [root, setRoot] = getChildRoot(result); + } + if (fallthroughAttrs && inheritAttrs !== false) { + const keys = Object.keys(fallthroughAttrs); + const { shapeFlag } = root; + if (keys.length) { + if (shapeFlag & (1 | 6)) { + if (propsOptions && keys.some(isModelListener)) { + fallthroughAttrs = filterModelListeners( + fallthroughAttrs, + propsOptions + ); + } + root = cloneVNode(root, fallthroughAttrs); + } else if (!accessedAttrs && root.type !== Comment) { + const allAttrs = Object.keys(attrs); + const eventAttrs = []; + const extraAttrs = []; + for (let i = 0, l = allAttrs.length; i < l; i++) { + const key = allAttrs[i]; + if (isOn(key)) { + if (!isModelListener(key)) { + eventAttrs.push(key[2].toLowerCase() + key.slice(3)); + } + } else { + extraAttrs.push(key); + } + } + if (extraAttrs.length) { + warn( + `Extraneous non-props attributes (${extraAttrs.join(", ")}) were passed to component but could not be automatically inherited because component renders fragment or text root nodes.` + ); + } + if (eventAttrs.length) { + warn( + `Extraneous non-emits event listeners (${eventAttrs.join(", ")}) were passed to component but could not be automatically inherited because component renders fragment or text root nodes. If the listener is intended to be a component custom event listener only, declare it using the "emits" option.` + ); + } + } + } + } + if (vnode.dirs) { + if (!isElementRoot(root)) { + warn( + `Runtime directive used on component with non-element root node. The directives will not function as intended.` + ); + } + root = cloneVNode(root); + root.dirs = root.dirs ? root.dirs.concat(vnode.dirs) : vnode.dirs; + } + if (vnode.transition) { + if (!isElementRoot(root)) { + warn( + `Component inside <Transition> renders non-element root node that cannot be animated.` + ); + } + root.transition = vnode.transition; + } + if (setRoot) { + setRoot(root); + } else { + result = root; + } + setCurrentRenderingInstance(prev); + return result; + } + const getChildRoot = (vnode) => { + const rawChildren = vnode.children; + const dynamicChildren = vnode.dynamicChildren; + const childRoot = filterSingleRoot(rawChildren); + if (!childRoot) { + return [vnode, void 0]; + } + const index = rawChildren.indexOf(childRoot); + const dynamicIndex = dynamicChildren ?
dynamicChildren.indexOf(childRoot) : -1; + const setRoot = (updatedRoot) => { + rawChildren[index] = updatedRoot; + if (dynamicChildren) { + if (dynamicIndex > -1) { + dynamicChildren[dynamicIndex] = updatedRoot; + } else if (updatedRoot.patchFlag > 0) { + vnode.dynamicChildren = [...dynamicChildren, updatedRoot]; + } + } + }; + return [normalizeVNode(childRoot), setRoot]; + }; + function filterSingleRoot(children) { + let singleRoot; + for (let i = 0; i < children.length; i++) { + const child = children[i]; + if (isVNode(child)) { + if (child.type !== Comment || child.children === "v-if") { + if (singleRoot) { + return; + } else { + singleRoot = child; + } + } + } else { + return; + } + } + return singleRoot; + } + const getFunctionalFallthrough = (attrs) => { + let res; + for (const key in attrs) { + if (key === "class" || key === "style" || isOn(key)) { + (res || (res = {}))[key] = attrs[key]; + } + } + return res; + }; + const filterModelListeners = (attrs, props) => { + const res = {}; + for (const key in attrs) { + if (!isModelListener(key) || !(key.slice(9) in props)) { + res[key] = attrs[key]; + } + } + return res; + }; + const isElementRoot = (vnode) => { + return vnode.shapeFlag & (6 | 1) || vnode.type === Comment; + }; + function shouldUpdateComponent(prevVNode, nextVNode, optimized) { + const { props: prevProps, children: prevChildren, component } = prevVNode; + const { props: nextProps, children: nextChildren, patchFlag } = nextVNode; + const emits = component.emitsOptions; + if ((prevChildren || nextChildren) && isHmrUpdating) { + return true; + } + if (nextVNode.dirs || nextVNode.transition) { + return true; + } + if (optimized && patchFlag >= 0) { + if (patchFlag & 1024) { + return true; + } + if (patchFlag & 16) { + if (!prevProps) { + return !!nextProps; + } + return hasPropsChanged(prevProps, nextProps, emits); + } else if (patchFlag & 8) { + const dynamicProps = nextVNode.dynamicProps; + for (let i = 0; i < dynamicProps.length; i++) { + const key = dynamicProps[i]; + if (nextProps[key] !== prevProps[key] && !isEmitListener(emits, key)) { + return true; + } + } + } + } else { + if (prevChildren || nextChildren) { + if (!nextChildren || !nextChildren.$stable) { + return true; + } + } + if (prevProps === nextProps) { + return false; + } + if (!prevProps) { + return !!nextProps; + } + if (!nextProps) { + return true; + } + return hasPropsChanged(prevProps, nextProps, emits); + } + return false; + } + function hasPropsChanged(prevProps, nextProps, emitsOptions) { + const nextKeys = Object.keys(nextProps); + if (nextKeys.length !== Object.keys(prevProps).length) { + return true; + } + for (let i = 0; i < nextKeys.length; i++) { + const key = nextKeys[i]; + if (nextProps[key] !== prevProps[key] && !isEmitListener(emitsOptions, key)) { + return true; + } + } + return false; + } + function updateHOCHostEl({ vnode, parent }, el) { + while (parent && parent.subTree === vnode) { + (vnode = parent.vnode).el = el; + parent = parent.parent; + } + } + + const isSuspense = (type) => type.__isSuspense; + const SuspenseImpl = { + name: "Suspense", + // In order to make Suspense tree-shakable, we need to avoid importing it + // directly in the renderer. The renderer checks for the __isSuspense flag + // on a vnode's type and calls the `process` method, passing in renderer + // internals. 
+ __isSuspense: true, + process(n1, n2, container, anchor, parentComponent, parentSuspense, isSVG, slotScopeIds, optimized, rendererInternals) { + if (n1 == null) { + mountSuspense( + n2, + container, + anchor, + parentComponent, + parentSuspense, + isSVG, + slotScopeIds, + optimized, + rendererInternals + ); + } else { + patchSuspense( + n1, + n2, + container, + anchor, + parentComponent, + isSVG, + slotScopeIds, + optimized, + rendererInternals + ); + } + }, + hydrate: hydrateSuspense, + create: createSuspenseBoundary, + normalize: normalizeSuspenseChildren + }; + const Suspense = SuspenseImpl ; + function triggerEvent(vnode, name) { + const eventListener = vnode.props && vnode.props[name]; + if (isFunction(eventListener)) { + eventListener(); + } + } + function mountSuspense(vnode, container, anchor, parentComponent, parentSuspense, isSVG, slotScopeIds, optimized, rendererInternals) { + const { + p: patch, + o: { createElement } + } = rendererInternals; + const hiddenContainer = createElement("div"); + const suspense = vnode.suspense = createSuspenseBoundary( + vnode, + parentSuspense, + parentComponent, + container, + hiddenContainer, + anchor, + isSVG, + slotScopeIds, + optimized, + rendererInternals + ); + patch( + null, + suspense.pendingBranch = vnode.ssContent, + hiddenContainer, + null, + parentComponent, + suspense, + isSVG, + slotScopeIds + ); + if (suspense.deps > 0) { + triggerEvent(vnode, "onPending"); + triggerEvent(vnode, "onFallback"); + patch( + null, + vnode.ssFallback, + container, + anchor, + parentComponent, + null, + // fallback tree will not have suspense context + isSVG, + slotScopeIds + ); + setActiveBranch(suspense, vnode.ssFallback); + } else { + suspense.resolve(false, true); + } + } + function patchSuspense(n1, n2, container, anchor, parentComponent, isSVG, slotScopeIds, optimized, { p: patch, um: unmount, o: { createElement } }) { + const suspense = n2.suspense = n1.suspense; + suspense.vnode = n2; + n2.el = n1.el; + const newBranch = n2.ssContent; + const newFallback = n2.ssFallback; + const { activeBranch, pendingBranch, isInFallback, isHydrating } = suspense; + if (pendingBranch) { + suspense.pendingBranch = newBranch; + if (isSameVNodeType(newBranch, pendingBranch)) { + patch( + pendingBranch, + newBranch, + suspense.hiddenContainer, + null, + parentComponent, + suspense, + isSVG, + slotScopeIds, + optimized + ); + if (suspense.deps <= 0) { + suspense.resolve(); + } else if (isInFallback) { + patch( + activeBranch, + newFallback, + container, + anchor, + parentComponent, + null, + // fallback tree will not have suspense context + isSVG, + slotScopeIds, + optimized + ); + setActiveBranch(suspense, newFallback); + } + } else { + suspense.pendingId++; + if (isHydrating) { + suspense.isHydrating = false; + suspense.activeBranch = pendingBranch; + } else { + unmount(pendingBranch, parentComponent, suspense); + } + suspense.deps = 0; + suspense.effects.length = 0; + suspense.hiddenContainer = createElement("div"); + if (isInFallback) { + patch( + null, + newBranch, + suspense.hiddenContainer, + null, + parentComponent, + suspense, + isSVG, + slotScopeIds, + optimized + ); + if (suspense.deps <= 0) { + suspense.resolve(); + } else { + patch( + activeBranch, + newFallback, + container, + anchor, + parentComponent, + null, + // fallback tree will not have suspense context + isSVG, + slotScopeIds, + optimized + ); + setActiveBranch(suspense, newFallback); + } + } else if (activeBranch && isSameVNodeType(newBranch, activeBranch)) { + patch( + activeBranch, + 
newBranch, + container, + anchor, + parentComponent, + suspense, + isSVG, + slotScopeIds, + optimized + ); + suspense.resolve(true); + } else { + patch( + null, + newBranch, + suspense.hiddenContainer, + null, + parentComponent, + suspense, + isSVG, + slotScopeIds, + optimized + ); + if (suspense.deps <= 0) { + suspense.resolve(); + } + } + } + } else { + if (activeBranch && isSameVNodeType(newBranch, activeBranch)) { + patch( + activeBranch, + newBranch, + container, + anchor, + parentComponent, + suspense, + isSVG, + slotScopeIds, + optimized + ); + setActiveBranch(suspense, newBranch); + } else { + triggerEvent(n2, "onPending"); + suspense.pendingBranch = newBranch; + suspense.pendingId++; + patch( + null, + newBranch, + suspense.hiddenContainer, + null, + parentComponent, + suspense, + isSVG, + slotScopeIds, + optimized + ); + if (suspense.deps <= 0) { + suspense.resolve(); + } else { + const { timeout, pendingId } = suspense; + if (timeout > 0) { + setTimeout(() => { + if (suspense.pendingId === pendingId) { + suspense.fallback(newFallback); + } + }, timeout); + } else if (timeout === 0) { + suspense.fallback(newFallback); + } + } + } + } + } + let hasWarned = false; + function createSuspenseBoundary(vnode, parentSuspense, parentComponent, container, hiddenContainer, anchor, isSVG, slotScopeIds, optimized, rendererInternals, isHydrating = false) { + if (!hasWarned) { + hasWarned = true; + console[console.info ? "info" : "log"]( + `<Suspense> is an experimental feature and its API will likely change.` + ); + } + const { + p: patch, + m: move, + um: unmount, + n: next, + o: { parentNode, remove } + } = rendererInternals; + let parentSuspenseId; + const isSuspensible = isVNodeSuspensible(vnode); + if (isSuspensible) { + if (parentSuspense == null ? void 0 : parentSuspense.pendingBranch) { + parentSuspenseId = parentSuspense.pendingId; + parentSuspense.deps++; + } + } + const timeout = vnode.props ? toNumber(vnode.props.timeout) : void 0; + { + assertNumber(timeout, `Suspense timeout`); + } + const suspense = { + vnode, + parent: parentSuspense, + parentComponent, + isSVG, + container, + hiddenContainer, + anchor, + deps: 0, + pendingId: 0, + timeout: typeof timeout === "number" ?
timeout : -1, + activeBranch: null, + pendingBranch: null, + isInFallback: true, + isHydrating, + isUnmounted: false, + effects: [], + resolve(resume = false, sync = false) { + { + if (!resume && !suspense.pendingBranch) { + throw new Error( + `suspense.resolve() is called without a pending branch.` + ); + } + if (suspense.isUnmounted) { + throw new Error( + `suspense.resolve() is called on an already unmounted suspense boundary.` + ); + } + } + const { + vnode: vnode2, + activeBranch, + pendingBranch, + pendingId, + effects, + parentComponent: parentComponent2, + container: container2 + } = suspense; + if (suspense.isHydrating) { + suspense.isHydrating = false; + } else if (!resume) { + const delayEnter = activeBranch && pendingBranch.transition && pendingBranch.transition.mode === "out-in"; + if (delayEnter) { + activeBranch.transition.afterLeave = () => { + if (pendingId === suspense.pendingId) { + move(pendingBranch, container2, anchor2, 0); + } + }; + } + let { anchor: anchor2 } = suspense; + if (activeBranch) { + anchor2 = next(activeBranch); + unmount(activeBranch, parentComponent2, suspense, true); + } + if (!delayEnter) { + move(pendingBranch, container2, anchor2, 0); + } + } + setActiveBranch(suspense, pendingBranch); + suspense.pendingBranch = null; + suspense.isInFallback = false; + let parent = suspense.parent; + let hasUnresolvedAncestor = false; + while (parent) { + if (parent.pendingBranch) { + parent.effects.push(...effects); + hasUnresolvedAncestor = true; + break; + } + parent = parent.parent; + } + if (!hasUnresolvedAncestor) { + queuePostFlushCb(effects); + } + suspense.effects = []; + if (isSuspensible) { + if (parentSuspense && parentSuspense.pendingBranch && parentSuspenseId === parentSuspense.pendingId) { + parentSuspense.deps--; + if (parentSuspense.deps === 0 && !sync) { + parentSuspense.resolve(); + } + } + } + triggerEvent(vnode2, "onResolve"); + }, + fallback(fallbackVNode) { + if (!suspense.pendingBranch) { + return; + } + const { vnode: vnode2, activeBranch, parentComponent: parentComponent2, container: container2, isSVG: isSVG2 } = suspense; + triggerEvent(vnode2, "onFallback"); + const anchor2 = next(activeBranch); + const mountFallback = () => { + if (!suspense.isInFallback) { + return; + } + patch( + null, + fallbackVNode, + container2, + anchor2, + parentComponent2, + null, + // fallback tree will not have suspense context + isSVG2, + slotScopeIds, + optimized + ); + setActiveBranch(suspense, fallbackVNode); + }; + const delayEnter = fallbackVNode.transition && fallbackVNode.transition.mode === "out-in"; + if (delayEnter) { + activeBranch.transition.afterLeave = mountFallback; + } + suspense.isInFallback = true; + unmount( + activeBranch, + parentComponent2, + null, + // no suspense so unmount hooks fire now + true + // shouldRemove + ); + if (!delayEnter) { + mountFallback(); + } + }, + move(container2, anchor2, type) { + suspense.activeBranch && move(suspense.activeBranch, container2, anchor2, type); + suspense.container = container2; + }, + next() { + return suspense.activeBranch && next(suspense.activeBranch); + }, + registerDep(instance, setupRenderEffect) { + const isInPendingSuspense = !!suspense.pendingBranch; + if (isInPendingSuspense) { + suspense.deps++; + } + const hydratedEl = instance.vnode.el; + instance.asyncDep.catch((err) => { + handleError(err, instance, 0); + }).then((asyncSetupResult) => { + if (instance.isUnmounted || suspense.isUnmounted || suspense.pendingId !== instance.suspenseId) { + return; + } + instance.asyncResolved = 
true; + const { vnode: vnode2 } = instance; + { + pushWarningContext(vnode2); + } + handleSetupResult(instance, asyncSetupResult, false); + if (hydratedEl) { + vnode2.el = hydratedEl; + } + const placeholder = !hydratedEl && instance.subTree.el; + setupRenderEffect( + instance, + vnode2, + // component may have been moved before resolve. + // if this is not a hydration, instance.subTree will be the comment + // placeholder. + parentNode(hydratedEl || instance.subTree.el), + // anchor will not be used if this is hydration, so only need to + // consider the comment placeholder case. + hydratedEl ? null : next(instance.subTree), + suspense, + isSVG, + optimized + ); + if (placeholder) { + remove(placeholder); + } + updateHOCHostEl(instance, vnode2.el); + { + popWarningContext(); + } + if (isInPendingSuspense && --suspense.deps === 0) { + suspense.resolve(); + } + }); + }, + unmount(parentSuspense2, doRemove) { + suspense.isUnmounted = true; + if (suspense.activeBranch) { + unmount( + suspense.activeBranch, + parentComponent, + parentSuspense2, + doRemove + ); + } + if (suspense.pendingBranch) { + unmount( + suspense.pendingBranch, + parentComponent, + parentSuspense2, + doRemove + ); + } + } + }; + return suspense; + } + function hydrateSuspense(node, vnode, parentComponent, parentSuspense, isSVG, slotScopeIds, optimized, rendererInternals, hydrateNode) { + const suspense = vnode.suspense = createSuspenseBoundary( + vnode, + parentSuspense, + parentComponent, + node.parentNode, + document.createElement("div"), + null, + isSVG, + slotScopeIds, + optimized, + rendererInternals, + true + /* hydrating */ + ); + const result = hydrateNode( + node, + suspense.pendingBranch = vnode.ssContent, + parentComponent, + suspense, + slotScopeIds, + optimized + ); + if (suspense.deps === 0) { + suspense.resolve(false, true); + } + return result; + } + function normalizeSuspenseChildren(vnode) { + const { shapeFlag, children } = vnode; + const isSlotChildren = shapeFlag & 32; + vnode.ssContent = normalizeSuspenseSlot( + isSlotChildren ? children.default : children + ); + vnode.ssFallback = isSlotChildren ? normalizeSuspenseSlot(children.fallback) : createVNode(Comment); + } + function normalizeSuspenseSlot(s) { + let block; + if (isFunction(s)) { + const trackBlock = isBlockTreeEnabled && s._c; + if (trackBlock) { + s._d = false; + openBlock(); + } + s = s(); + if (trackBlock) { + s._d = true; + block = currentBlock; + closeBlock(); + } + } + if (isArray(s)) { + const singleChild = filterSingleRoot(s); + if (!singleChild) { + warn(`<Suspense> slots expect a single root node.`); + } + s = singleChild; + } + s = normalizeVNode(s); + if (block && !s.dynamicChildren) { + s.dynamicChildren = block.filter((c) => c !== s); + } + return s; + } + function queueEffectWithSuspense(fn, suspense) { + if (suspense && suspense.pendingBranch) { + if (isArray(fn)) { + suspense.effects.push(...fn); + } else { + suspense.effects.push(fn); + } + } else { + queuePostFlushCb(fn); + } + } + function setActiveBranch(suspense, branch) { + suspense.activeBranch = branch; + const { vnode, parentComponent } = suspense; + const el = vnode.el = branch.el; + if (parentComponent && parentComponent.subTree === vnode) { + parentComponent.vnode.el = el; + updateHOCHostEl(parentComponent, el); + } + } + function isVNodeSuspensible(vnode) { + var _a; + return ((_a = vnode.props) == null ?
void 0 : _a.suspensible) != null && vnode.props.suspensible !== false; + } + + function watchEffect(effect, options) { + return doWatch(effect, null, options); + } + function watchPostEffect(effect, options) { + return doWatch( + effect, + null, + extend({}, options, { flush: "post" }) + ); + } + function watchSyncEffect(effect, options) { + return doWatch( + effect, + null, + extend({}, options, { flush: "sync" }) + ); + } + const INITIAL_WATCHER_VALUE = {}; + function watch(source, cb, options) { + if (!isFunction(cb)) { + warn( + `\`watch(fn, options?)\` signature has been moved to a separate API. Use \`watchEffect(fn, options?)\` instead. \`watch\` now only supports \`watch(source, cb, options?) signature.` + ); + } + return doWatch(source, cb, options); + } + function doWatch(source, cb, { immediate, deep, flush, onTrack, onTrigger } = EMPTY_OBJ) { + var _a; + if (!cb) { + if (immediate !== void 0) { + warn( + `watch() "immediate" option is only respected when using the watch(source, callback, options?) signature.` + ); + } + if (deep !== void 0) { + warn( + `watch() "deep" option is only respected when using the watch(source, callback, options?) signature.` + ); + } + } + const warnInvalidSource = (s) => { + warn( + `Invalid watch source: `, + s, + `A watch source can only be a getter/effect function, a ref, a reactive object, or an array of these types.` + ); + }; + const instance = getCurrentScope() === ((_a = currentInstance) == null ? void 0 : _a.scope) ? currentInstance : null; + let getter; + let forceTrigger = false; + let isMultiSource = false; + if (isRef(source)) { + getter = () => source.value; + forceTrigger = isShallow(source); + } else if (isReactive(source)) { + getter = () => source; + deep = true; + } else if (isArray(source)) { + isMultiSource = true; + forceTrigger = source.some((s) => isReactive(s) || isShallow(s)); + getter = () => source.map((s) => { + if (isRef(s)) { + return s.value; + } else if (isReactive(s)) { + return traverse(s); + } else if (isFunction(s)) { + return callWithErrorHandling(s, instance, 2); + } else { + warnInvalidSource(s); + } + }); + } else if (isFunction(source)) { + if (cb) { + getter = () => callWithErrorHandling(source, instance, 2); + } else { + getter = () => { + if (instance && instance.isUnmounted) { + return; + } + if (cleanup) { + cleanup(); + } + return callWithAsyncErrorHandling( + source, + instance, + 3, + [onCleanup] + ); + }; + } + } else { + getter = NOOP; + warnInvalidSource(source); + } + if (cb && deep) { + const baseGetter = getter; + getter = () => traverse(baseGetter()); + } + let cleanup; + let onCleanup = (fn) => { + cleanup = effect.onStop = () => { + callWithErrorHandling(fn, instance, 4); + }; + }; + let oldValue = isMultiSource ? new Array(source.length).fill(INITIAL_WATCHER_VALUE) : INITIAL_WATCHER_VALUE; + const job = () => { + if (!effect.active) { + return; + } + if (cb) { + const newValue = effect.run(); + if (deep || forceTrigger || (isMultiSource ? newValue.some( + (v, i) => hasChanged(v, oldValue[i]) + ) : hasChanged(newValue, oldValue)) || false) { + if (cleanup) { + cleanup(); + } + callWithAsyncErrorHandling(cb, instance, 3, [ + newValue, + // pass undefined as the old value when it's changed for the first time + oldValue === INITIAL_WATCHER_VALUE ? void 0 : isMultiSource && oldValue[0] === INITIAL_WATCHER_VALUE ? 
[] : oldValue, + onCleanup + ]); + oldValue = newValue; + } + } else { + effect.run(); + } + }; + job.allowRecurse = !!cb; + let scheduler; + if (flush === "sync") { + scheduler = job; + } else if (flush === "post") { + scheduler = () => queuePostRenderEffect(job, instance && instance.suspense); + } else { + job.pre = true; + if (instance) + job.id = instance.uid; + scheduler = () => queueJob(job); + } + const effect = new ReactiveEffect(getter, scheduler); + { + effect.onTrack = onTrack; + effect.onTrigger = onTrigger; + } + if (cb) { + if (immediate) { + job(); + } else { + oldValue = effect.run(); + } + } else if (flush === "post") { + queuePostRenderEffect( + effect.run.bind(effect), + instance && instance.suspense + ); + } else { + effect.run(); + } + const unwatch = () => { + effect.stop(); + if (instance && instance.scope) { + remove(instance.scope.effects, effect); + } + }; + return unwatch; + } + function instanceWatch(source, value, options) { + const publicThis = this.proxy; + const getter = isString(source) ? source.includes(".") ? createPathGetter(publicThis, source) : () => publicThis[source] : source.bind(publicThis, publicThis); + let cb; + if (isFunction(value)) { + cb = value; + } else { + cb = value.handler; + options = value; + } + const cur = currentInstance; + setCurrentInstance(this); + const res = doWatch(getter, cb.bind(publicThis), options); + if (cur) { + setCurrentInstance(cur); + } else { + unsetCurrentInstance(); + } + return res; + } + function createPathGetter(ctx, path) { + const segments = path.split("."); + return () => { + let cur = ctx; + for (let i = 0; i < segments.length && cur; i++) { + cur = cur[segments[i]]; + } + return cur; + }; + } + function traverse(value, seen) { + if (!isObject(value) || value["__v_skip"]) { + return value; + } + seen = seen || /* @__PURE__ */ new Set(); + if (seen.has(value)) { + return value; + } + seen.add(value); + if (isRef(value)) { + traverse(value.value, seen); + } else if (isArray(value)) { + for (let i = 0; i < value.length; i++) { + traverse(value[i], seen); + } + } else if (isSet(value) || isMap(value)) { + value.forEach((v) => { + traverse(v, seen); + }); + } else if (isPlainObject(value)) { + for (const key in value) { + traverse(value[key], seen); + } + } + return value; + } + + function validateDirectiveName(name) { + if (isBuiltInDirective(name)) { + warn("Do not use built-in directive ids as custom directive id: " + name); + } + } + function withDirectives(vnode, directives) { + const internalInstance = currentRenderingInstance; + if (internalInstance === null) { + warn(`withDirectives can only be used inside render functions.`); + return vnode; + } + const instance = getExposeProxy(internalInstance) || internalInstance.proxy; + const bindings = vnode.dirs || (vnode.dirs = []); + for (let i = 0; i < directives.length; i++) { + let [dir, value, arg, modifiers = EMPTY_OBJ] = directives[i]; + if (dir) { + if (isFunction(dir)) { + dir = { + mounted: dir, + updated: dir + }; + } + if (dir.deep) { + traverse(value); + } + bindings.push({ + dir, + instance, + value, + oldValue: void 0, + arg, + modifiers + }); + } + } + return vnode; + } + function invokeDirectiveHook(vnode, prevVNode, instance, name) { + const bindings = vnode.dirs; + const oldBindings = prevVNode && prevVNode.dirs; + for (let i = 0; i < bindings.length; i++) { + const binding = bindings[i]; + if (oldBindings) { + binding.oldValue = oldBindings[i].value; + } + let hook = binding.dir[name]; + if (hook) { + pauseTracking(); + 
callWithAsyncErrorHandling(hook, instance, 8, [ + vnode.el, + binding, + vnode, + prevVNode + ]); + resetTracking(); + } + } + } + + function useTransitionState() { + const state = { + isMounted: false, + isLeaving: false, + isUnmounting: false, + leavingVNodes: /* @__PURE__ */ new Map() + }; + onMounted(() => { + state.isMounted = true; + }); + onBeforeUnmount(() => { + state.isUnmounting = true; + }); + return state; + } + const TransitionHookValidator = [Function, Array]; + const BaseTransitionPropsValidators = { + mode: String, + appear: Boolean, + persisted: Boolean, + // enter + onBeforeEnter: TransitionHookValidator, + onEnter: TransitionHookValidator, + onAfterEnter: TransitionHookValidator, + onEnterCancelled: TransitionHookValidator, + // leave + onBeforeLeave: TransitionHookValidator, + onLeave: TransitionHookValidator, + onAfterLeave: TransitionHookValidator, + onLeaveCancelled: TransitionHookValidator, + // appear + onBeforeAppear: TransitionHookValidator, + onAppear: TransitionHookValidator, + onAfterAppear: TransitionHookValidator, + onAppearCancelled: TransitionHookValidator + }; + const BaseTransitionImpl = { + name: `BaseTransition`, + props: BaseTransitionPropsValidators, + setup(props, { slots }) { + const instance = getCurrentInstance(); + const state = useTransitionState(); + let prevTransitionKey; + return () => { + const children = slots.default && getTransitionRawChildren(slots.default(), true); + if (!children || !children.length) { + return; + } + let child = children[0]; + if (children.length > 1) { + let hasFound = false; + for (const c of children) { + if (c.type !== Comment) { + if (hasFound) { + warn( + "<transition> can only be used on a single element or component. Use <transition-group> for lists." + ); + break; + } + child = c; + hasFound = true; + } + } + } + const rawProps = toRaw(props); + const { mode } = rawProps; + if (mode && mode !== "in-out" && mode !== "out-in" && mode !== "default") { + warn(`invalid <transition> mode: ${mode}`); + } + if (state.isLeaving) { + return emptyPlaceholder(child); + } + const innerChild = getKeepAliveChild(child); + if (!innerChild) { + return emptyPlaceholder(child); + } + const enterHooks = resolveTransitionHooks( + innerChild, + rawProps, + state, + instance + ); + setTransitionHooks(innerChild, enterHooks); + const oldChild = instance.subTree; + const oldInnerChild = oldChild && getKeepAliveChild(oldChild); + let transitionKeyChanged = false; + const { getTransitionKey } = innerChild.type; + if (getTransitionKey) { + const key = getTransitionKey(); + if (prevTransitionKey === void 0) { + prevTransitionKey = key; + } else if (key !== prevTransitionKey) { + prevTransitionKey = key; + transitionKeyChanged = true; + } + } + if (oldInnerChild && oldInnerChild.type !== Comment && (!isSameVNodeType(innerChild, oldInnerChild) || transitionKeyChanged)) { + const leavingHooks = resolveTransitionHooks( + oldInnerChild, + rawProps, + state, + instance + ); + setTransitionHooks(oldInnerChild, leavingHooks); + if (mode === "out-in") { + state.isLeaving = true; + leavingHooks.afterLeave = () => { + state.isLeaving = false; + if (instance.update.active !== false) { + instance.update(); + } + }; + return emptyPlaceholder(child); + } else if (mode === "in-out" && innerChild.type !== Comment) { + leavingHooks.delayLeave = (el, earlyRemove, delayedLeave) => { + const leavingVNodesCache = getLeavingNodesForType( + state, + oldInnerChild + ); + leavingVNodesCache[String(oldInnerChild.key)] = oldInnerChild; + el._leaveCb = () => { + earlyRemove(); + el._leaveCb = void 0; +
delete enterHooks.delayedLeave; + }; + enterHooks.delayedLeave = delayedLeave; + }; + } + } + return child; + }; + } + }; + const BaseTransition = BaseTransitionImpl; + function getLeavingNodesForType(state, vnode) { + const { leavingVNodes } = state; + let leavingVNodesCache = leavingVNodes.get(vnode.type); + if (!leavingVNodesCache) { + leavingVNodesCache = /* @__PURE__ */ Object.create(null); + leavingVNodes.set(vnode.type, leavingVNodesCache); + } + return leavingVNodesCache; + } + function resolveTransitionHooks(vnode, props, state, instance) { + const { + appear, + mode, + persisted = false, + onBeforeEnter, + onEnter, + onAfterEnter, + onEnterCancelled, + onBeforeLeave, + onLeave, + onAfterLeave, + onLeaveCancelled, + onBeforeAppear, + onAppear, + onAfterAppear, + onAppearCancelled + } = props; + const key = String(vnode.key); + const leavingVNodesCache = getLeavingNodesForType(state, vnode); + const callHook = (hook, args) => { + hook && callWithAsyncErrorHandling( + hook, + instance, + 9, + args + ); + }; + const callAsyncHook = (hook, args) => { + const done = args[1]; + callHook(hook, args); + if (isArray(hook)) { + if (hook.every((hook2) => hook2.length <= 1)) + done(); + } else if (hook.length <= 1) { + done(); + } + }; + const hooks = { + mode, + persisted, + beforeEnter(el) { + let hook = onBeforeEnter; + if (!state.isMounted) { + if (appear) { + hook = onBeforeAppear || onBeforeEnter; + } else { + return; + } + } + if (el._leaveCb) { + el._leaveCb( + true + /* cancelled */ + ); + } + const leavingVNode = leavingVNodesCache[key]; + if (leavingVNode && isSameVNodeType(vnode, leavingVNode) && leavingVNode.el._leaveCb) { + leavingVNode.el._leaveCb(); + } + callHook(hook, [el]); + }, + enter(el) { + let hook = onEnter; + let afterHook = onAfterEnter; + let cancelHook = onEnterCancelled; + if (!state.isMounted) { + if (appear) { + hook = onAppear || onEnter; + afterHook = onAfterAppear || onAfterEnter; + cancelHook = onAppearCancelled || onEnterCancelled; + } else { + return; + } + } + let called = false; + const done = el._enterCb = (cancelled) => { + if (called) + return; + called = true; + if (cancelled) { + callHook(cancelHook, [el]); + } else { + callHook(afterHook, [el]); + } + if (hooks.delayedLeave) { + hooks.delayedLeave(); + } + el._enterCb = void 0; + }; + if (hook) { + callAsyncHook(hook, [el, done]); + } else { + done(); + } + }, + leave(el, remove) { + const key2 = String(vnode.key); + if (el._enterCb) { + el._enterCb( + true + /* cancelled */ + ); + } + if (state.isUnmounting) { + return remove(); + } + callHook(onBeforeLeave, [el]); + let called = false; + const done = el._leaveCb = (cancelled) => { + if (called) + return; + called = true; + remove(); + if (cancelled) { + callHook(onLeaveCancelled, [el]); + } else { + callHook(onAfterLeave, [el]); + } + el._leaveCb = void 0; + if (leavingVNodesCache[key2] === vnode) { + delete leavingVNodesCache[key2]; + } + }; + leavingVNodesCache[key2] = vnode; + if (onLeave) { + callAsyncHook(onLeave, [el, done]); + } else { + done(); + } + }, + clone(vnode2) { + return resolveTransitionHooks(vnode2, props, state, instance); + } + }; + return hooks; + } + function emptyPlaceholder(vnode) { + if (isKeepAlive(vnode)) { + vnode = cloneVNode(vnode); + vnode.children = null; + return vnode; + } + } + function getKeepAliveChild(vnode) { + return isKeepAlive(vnode) ? vnode.children ? 
vnode.children[0] : void 0 : vnode; + } + function setTransitionHooks(vnode, hooks) { + if (vnode.shapeFlag & 6 && vnode.component) { + setTransitionHooks(vnode.component.subTree, hooks); + } else if (vnode.shapeFlag & 128) { + vnode.ssContent.transition = hooks.clone(vnode.ssContent); + vnode.ssFallback.transition = hooks.clone(vnode.ssFallback); + } else { + vnode.transition = hooks; + } + } + function getTransitionRawChildren(children, keepComment = false, parentKey) { + let ret = []; + let keyedFragmentCount = 0; + for (let i = 0; i < children.length; i++) { + let child = children[i]; + const key = parentKey == null ? child.key : String(parentKey) + String(child.key != null ? child.key : i); + if (child.type === Fragment) { + if (child.patchFlag & 128) + keyedFragmentCount++; + ret = ret.concat( + getTransitionRawChildren(child.children, keepComment, key) + ); + } else if (keepComment || child.type !== Comment) { + ret.push(key != null ? cloneVNode(child, { key }) : child); + } + } + if (keyedFragmentCount > 1) { + for (let i = 0; i < ret.length; i++) { + ret[i].patchFlag = -2; + } + } + return ret; + } + + function defineComponent(options, extraOptions) { + return isFunction(options) ? ( + // #8326: extend call and options.name access are considered side-effects + // by Rollup, so we have to wrap it in a pure-annotated IIFE. + /* @__PURE__ */ (() => extend({ name: options.name }, extraOptions, { setup: options }))() + ) : options; + } + + const isAsyncWrapper = (i) => !!i.type.__asyncLoader; + function defineAsyncComponent(source) { + if (isFunction(source)) { + source = { loader: source }; + } + const { + loader, + loadingComponent, + errorComponent, + delay = 200, + timeout, + // undefined = never times out + suspensible = true, + onError: userOnError + } = source; + let pendingRequest = null; + let resolvedComp; + let retries = 0; + const retry = () => { + retries++; + pendingRequest = null; + return load(); + }; + const load = () => { + let thisRequest; + return pendingRequest || (thisRequest = pendingRequest = loader().catch((err) => { + err = err instanceof Error ? err : new Error(String(err)); + if (userOnError) { + return new Promise((resolve, reject) => { + const userRetry = () => resolve(retry()); + const userFail = () => reject(err); + userOnError(err, userRetry, userFail, retries + 1); + }); + } else { + throw err; + } + }).then((comp) => { + if (thisRequest !== pendingRequest && pendingRequest) { + return pendingRequest; + } + if (!comp) { + warn( + `Async component loader resolved to undefined. 
If you are using retry(), make sure to return its return value.` + ); + } + if (comp && (comp.__esModule || comp[Symbol.toStringTag] === "Module")) { + comp = comp.default; + } + if (comp && !isObject(comp) && !isFunction(comp)) { + throw new Error(`Invalid async component load result: ${comp}`); + } + resolvedComp = comp; + return comp; + })); + }; + return defineComponent({ + name: "AsyncComponentWrapper", + __asyncLoader: load, + get __asyncResolved() { + return resolvedComp; + }, + setup() { + const instance = currentInstance; + if (resolvedComp) { + return () => createInnerComp(resolvedComp, instance); + } + const onError = (err) => { + pendingRequest = null; + handleError( + err, + instance, + 13, + !errorComponent + /* do not throw in dev if user provided error component */ + ); + }; + if (suspensible && instance.suspense || false) { + return load().then((comp) => { + return () => createInnerComp(comp, instance); + }).catch((err) => { + onError(err); + return () => errorComponent ? createVNode(errorComponent, { + error: err + }) : null; + }); + } + const loaded = ref(false); + const error = ref(); + const delayed = ref(!!delay); + if (delay) { + setTimeout(() => { + delayed.value = false; + }, delay); + } + if (timeout != null) { + setTimeout(() => { + if (!loaded.value && !error.value) { + const err = new Error( + `Async component timed out after ${timeout}ms.` + ); + onError(err); + error.value = err; + } + }, timeout); + } + load().then(() => { + loaded.value = true; + if (instance.parent && isKeepAlive(instance.parent.vnode)) { + queueJob(instance.parent.update); + } + }).catch((err) => { + onError(err); + error.value = err; + }); + return () => { + if (loaded.value && resolvedComp) { + return createInnerComp(resolvedComp, instance); + } else if (error.value && errorComponent) { + return createVNode(errorComponent, { + error: error.value + }); + } else if (loadingComponent && !delayed.value) { + return createVNode(loadingComponent); + } + }; + } + }); + } + function createInnerComp(comp, parent) { + const { ref: ref2, props, children, ce } = parent.vnode; + const vnode = createVNode(comp, props, children); + vnode.ref = ref2; + vnode.ce = ce; + delete parent.vnode.ce; + return vnode; + } + + const isKeepAlive = (vnode) => vnode.type.__isKeepAlive; + const KeepAliveImpl = { + name: `KeepAlive`, + // Marker for special handling inside the renderer. We are not using a === + // check directly on KeepAlive in the renderer, because importing it directly + // would prevent it from being tree-shaken. 
+ __isKeepAlive: true, + props: { + include: [String, RegExp, Array], + exclude: [String, RegExp, Array], + max: [String, Number] + }, + setup(props, { slots }) { + const instance = getCurrentInstance(); + const sharedContext = instance.ctx; + const cache = /* @__PURE__ */ new Map(); + const keys = /* @__PURE__ */ new Set(); + let current = null; + { + instance.__v_cache = cache; + } + const parentSuspense = instance.suspense; + const { + renderer: { + p: patch, + m: move, + um: _unmount, + o: { createElement } + } + } = sharedContext; + const storageContainer = createElement("div"); + sharedContext.activate = (vnode, container, anchor, isSVG, optimized) => { + const instance2 = vnode.component; + move(vnode, container, anchor, 0, parentSuspense); + patch( + instance2.vnode, + vnode, + container, + anchor, + instance2, + parentSuspense, + isSVG, + vnode.slotScopeIds, + optimized + ); + queuePostRenderEffect(() => { + instance2.isDeactivated = false; + if (instance2.a) { + invokeArrayFns(instance2.a); + } + const vnodeHook = vnode.props && vnode.props.onVnodeMounted; + if (vnodeHook) { + invokeVNodeHook(vnodeHook, instance2.parent, vnode); + } + }, parentSuspense); + { + devtoolsComponentAdded(instance2); + } + }; + sharedContext.deactivate = (vnode) => { + const instance2 = vnode.component; + move(vnode, storageContainer, null, 1, parentSuspense); + queuePostRenderEffect(() => { + if (instance2.da) { + invokeArrayFns(instance2.da); + } + const vnodeHook = vnode.props && vnode.props.onVnodeUnmounted; + if (vnodeHook) { + invokeVNodeHook(vnodeHook, instance2.parent, vnode); + } + instance2.isDeactivated = true; + }, parentSuspense); + { + devtoolsComponentAdded(instance2); + } + }; + function unmount(vnode) { + resetShapeFlag(vnode); + _unmount(vnode, instance, parentSuspense, true); + } + function pruneCache(filter) { + cache.forEach((vnode, key) => { + const name = getComponentName(vnode.type); + if (name && (!filter || !filter(name))) { + pruneCacheEntry(key); + } + }); + } + function pruneCacheEntry(key) { + const cached = cache.get(key); + if (!current || !isSameVNodeType(cached, current)) { + unmount(cached); + } else if (current) { + resetShapeFlag(current); + } + cache.delete(key); + keys.delete(key); + } + watch( + () => [props.include, props.exclude], + ([include, exclude]) => { + include && pruneCache((name) => matches(include, name)); + exclude && pruneCache((name) => !matches(exclude, name)); + }, + // prune post-render after `current` has been updated + { flush: "post", deep: true } + ); + let pendingCacheKey = null; + const cacheSubtree = () => { + if (pendingCacheKey != null) { + cache.set(pendingCacheKey, getInnerChild(instance.subTree)); + } + }; + onMounted(cacheSubtree); + onUpdated(cacheSubtree); + onBeforeUnmount(() => { + cache.forEach((cached) => { + const { subTree, suspense } = instance; + const vnode = getInnerChild(subTree); + if (cached.type === vnode.type && cached.key === vnode.key) { + resetShapeFlag(vnode); + const da = vnode.component.da; + da && queuePostRenderEffect(da, suspense); + return; + } + unmount(cached); + }); + }); + return () => { + pendingCacheKey = null; + if (!slots.default) { + return null; + } + const children = slots.default(); + const rawVNode = children[0]; + if (children.length > 1) { + { + warn(`KeepAlive should contain exactly one component child.`); + } + current = null; + return children; + } else if (!isVNode(rawVNode) || !(rawVNode.shapeFlag & 4) && !(rawVNode.shapeFlag & 128)) { + current = null; + return rawVNode; + } + let 
vnode = getInnerChild(rawVNode); + const comp = vnode.type; + const name = getComponentName( + isAsyncWrapper(vnode) ? vnode.type.__asyncResolved || {} : comp + ); + const { include, exclude, max } = props; + if (include && (!name || !matches(include, name)) || exclude && name && matches(exclude, name)) { + current = vnode; + return rawVNode; + } + const key = vnode.key == null ? comp : vnode.key; + const cachedVNode = cache.get(key); + if (vnode.el) { + vnode = cloneVNode(vnode); + if (rawVNode.shapeFlag & 128) { + rawVNode.ssContent = vnode; + } + } + pendingCacheKey = key; + if (cachedVNode) { + vnode.el = cachedVNode.el; + vnode.component = cachedVNode.component; + if (vnode.transition) { + setTransitionHooks(vnode, vnode.transition); + } + vnode.shapeFlag |= 512; + keys.delete(key); + keys.add(key); + } else { + keys.add(key); + if (max && keys.size > parseInt(max, 10)) { + pruneCacheEntry(keys.values().next().value); + } + } + vnode.shapeFlag |= 256; + current = vnode; + return isSuspense(rawVNode.type) ? rawVNode : vnode; + }; + } + }; + const KeepAlive = KeepAliveImpl; + function matches(pattern, name) { + if (isArray(pattern)) { + return pattern.some((p) => matches(p, name)); + } else if (isString(pattern)) { + return pattern.split(",").includes(name); + } else if (isRegExp(pattern)) { + return pattern.test(name); + } + return false; + } + function onActivated(hook, target) { + registerKeepAliveHook(hook, "a", target); + } + function onDeactivated(hook, target) { + registerKeepAliveHook(hook, "da", target); + } + function registerKeepAliveHook(hook, type, target = currentInstance) { + const wrappedHook = hook.__wdc || (hook.__wdc = () => { + let current = target; + while (current) { + if (current.isDeactivated) { + return; + } + current = current.parent; + } + return hook(); + }); + injectHook(type, wrappedHook, target); + if (target) { + let current = target.parent; + while (current && current.parent) { + if (isKeepAlive(current.parent.vnode)) { + injectToKeepAliveRoot(wrappedHook, type, target, current); + } + current = current.parent; + } + } + } + function injectToKeepAliveRoot(hook, type, target, keepAliveRoot) { + const injected = injectHook( + type, + hook, + keepAliveRoot, + true + /* prepend */ + ); + onUnmounted(() => { + remove(keepAliveRoot[type], injected); + }, target); + } + function resetShapeFlag(vnode) { + vnode.shapeFlag &= ~256; + vnode.shapeFlag &= ~512; + } + function getInnerChild(vnode) { + return vnode.shapeFlag & 128 ? vnode.ssContent : vnode; + } + + function injectHook(type, hook, target = currentInstance, prepend = false) { + if (target) { + const hooks = target[type] || (target[type] = []); + const wrappedHook = hook.__weh || (hook.__weh = (...args) => { + if (target.isUnmounted) { + return; + } + pauseTracking(); + setCurrentInstance(target); + const res = callWithAsyncErrorHandling(hook, target, type, args); + unsetCurrentInstance(); + resetTracking(); + return res; + }); + if (prepend) { + hooks.unshift(wrappedHook); + } else { + hooks.push(wrappedHook); + } + return wrappedHook; + } else { + const apiName = toHandlerKey(ErrorTypeStrings[type].replace(/ hook$/, "")); + warn( + `${apiName} is called when there is no active component instance to be associated with. 
Lifecycle injection APIs can only be used during execution of setup().` + (` If you are using async setup(), make sure to register lifecycle hooks before the first await statement.` ) + ); + } + } + const createHook = (lifecycle) => (hook, target = currentInstance) => ( + // post-create lifecycle registrations are noops during SSR (except for serverPrefetch) + (!isInSSRComponentSetup || lifecycle === "sp") && injectHook(lifecycle, (...args) => hook(...args), target) + ); + const onBeforeMount = createHook("bm"); + const onMounted = createHook("m"); + const onBeforeUpdate = createHook("bu"); + const onUpdated = createHook("u"); + const onBeforeUnmount = createHook("bum"); + const onUnmounted = createHook("um"); + const onServerPrefetch = createHook("sp"); + const onRenderTriggered = createHook( + "rtg" + ); + const onRenderTracked = createHook( + "rtc" + ); + function onErrorCaptured(hook, target = currentInstance) { + injectHook("ec", hook, target); + } + + const COMPONENTS = "components"; + const DIRECTIVES = "directives"; + function resolveComponent(name, maybeSelfReference) { + return resolveAsset(COMPONENTS, name, true, maybeSelfReference) || name; + } + const NULL_DYNAMIC_COMPONENT = Symbol.for("v-ndc"); + function resolveDynamicComponent(component) { + if (isString(component)) { + return resolveAsset(COMPONENTS, component, false) || component; + } else { + return component || NULL_DYNAMIC_COMPONENT; + } + } + function resolveDirective(name) { + return resolveAsset(DIRECTIVES, name); + } + function resolveAsset(type, name, warnMissing = true, maybeSelfReference = false) { + const instance = currentRenderingInstance || currentInstance; + if (instance) { + const Component = instance.type; + if (type === COMPONENTS) { + const selfName = getComponentName( + Component, + false + /* do not include inferred name to avoid breaking existing code */ + ); + if (selfName && (selfName === name || selfName === camelize(name) || selfName === capitalize(camelize(name)))) { + return Component; + } + } + const res = ( + // local registration + // check instance[type] first which is resolved for options API + resolve(instance[type] || Component[type], name) || // global registration + resolve(instance.appContext[type], name) + ); + if (!res && maybeSelfReference) { + return Component; + } + if (warnMissing && !res) { + const extra = type === COMPONENTS ? 
` +If this is a native custom element, make sure to exclude it from component resolution via compilerOptions.isCustomElement.` : ``; + warn(`Failed to resolve ${type.slice(0, -1)}: ${name}${extra}`); + } + return res; + } else { + warn( + `resolve${capitalize(type.slice(0, -1))} can only be used in render() or setup().` + ); + } + } + function resolve(registry, name) { + return registry && (registry[name] || registry[camelize(name)] || registry[capitalize(camelize(name))]); + } + + function renderList(source, renderItem, cache, index) { + let ret; + const cached = cache && cache[index]; + if (isArray(source) || isString(source)) { + ret = new Array(source.length); + for (let i = 0, l = source.length; i < l; i++) { + ret[i] = renderItem(source[i], i, void 0, cached && cached[i]); + } + } else if (typeof source === "number") { + if (!Number.isInteger(source)) { + warn(`The v-for range expect an integer value but got ${source}.`); + } + ret = new Array(source); + for (let i = 0; i < source; i++) { + ret[i] = renderItem(i + 1, i, void 0, cached && cached[i]); + } + } else if (isObject(source)) { + if (source[Symbol.iterator]) { + ret = Array.from( + source, + (item, i) => renderItem(item, i, void 0, cached && cached[i]) + ); + } else { + const keys = Object.keys(source); + ret = new Array(keys.length); + for (let i = 0, l = keys.length; i < l; i++) { + const key = keys[i]; + ret[i] = renderItem(source[key], key, i, cached && cached[i]); + } + } + } else { + ret = []; + } + if (cache) { + cache[index] = ret; + } + return ret; + } + + function createSlots(slots, dynamicSlots) { + for (let i = 0; i < dynamicSlots.length; i++) { + const slot = dynamicSlots[i]; + if (isArray(slot)) { + for (let j = 0; j < slot.length; j++) { + slots[slot[j].name] = slot[j].fn; + } + } else if (slot) { + slots[slot.name] = slot.key ? (...args) => { + const res = slot.fn(...args); + if (res) + res.key = slot.key; + return res; + } : slot.fn; + } + } + return slots; + } + + function renderSlot(slots, name, props = {}, fallback, noSlotted) { + if (currentRenderingInstance.isCE || currentRenderingInstance.parent && isAsyncWrapper(currentRenderingInstance.parent) && currentRenderingInstance.parent.isCE) { + if (name !== "default") + props.name = name; + return createVNode("slot", props, fallback && fallback()); + } + let slot = slots[name]; + if (slot && slot.length > 1) { + warn( + `SSR-optimized slot function detected in a non-SSR-optimized render function. You need to mark this component with $dynamic-slots in the parent template.` + ); + slot = () => []; + } + if (slot && slot._c) { + slot._d = false; + } + openBlock(); + const validSlotContent = slot && ensureValidVNode(slot(props)); + const rendered = createBlock( + Fragment, + { + key: props.key || // slot content array of a dynamic conditional slot may have a branch + // key attached in the `createSlots` helper, respect that + validSlotContent && validSlotContent.key || `_${name}` + }, + validSlotContent || (fallback ? fallback() : []), + validSlotContent && slots._ === 1 ? 64 : -2 + ); + if (!noSlotted && rendered.scopeId) { + rendered.slotScopeIds = [rendered.scopeId + "-s"]; + } + if (slot && slot._c) { + slot._d = true; + } + return rendered; + } + function ensureValidVNode(vnodes) { + return vnodes.some((child) => { + if (!isVNode(child)) + return true; + if (child.type === Comment) + return false; + if (child.type === Fragment && !ensureValidVNode(child.children)) + return false; + return true; + }) ? 
vnodes : null; + } + + function toHandlers(obj, preserveCaseIfNecessary) { + const ret = {}; + if (!isObject(obj)) { + warn(`v-on with no argument expects an object value.`); + return ret; + } + for (const key in obj) { + ret[preserveCaseIfNecessary && /[A-Z]/.test(key) ? `on:${key}` : toHandlerKey(key)] = obj[key]; + } + return ret; + } + + const getPublicInstance = (i) => { + if (!i) + return null; + if (isStatefulComponent(i)) + return getExposeProxy(i) || i.proxy; + return getPublicInstance(i.parent); + }; + const publicPropertiesMap = ( + // Move PURE marker to new line to workaround compiler discarding it + // due to type annotation + /* @__PURE__ */ extend(/* @__PURE__ */ Object.create(null), { + $: (i) => i, + $el: (i) => i.vnode.el, + $data: (i) => i.data, + $props: (i) => shallowReadonly(i.props) , + $attrs: (i) => shallowReadonly(i.attrs) , + $slots: (i) => shallowReadonly(i.slots) , + $refs: (i) => shallowReadonly(i.refs) , + $parent: (i) => getPublicInstance(i.parent), + $root: (i) => getPublicInstance(i.root), + $emit: (i) => i.emit, + $options: (i) => resolveMergedOptions(i) , + $forceUpdate: (i) => i.f || (i.f = () => queueJob(i.update)), + $nextTick: (i) => i.n || (i.n = nextTick.bind(i.proxy)), + $watch: (i) => instanceWatch.bind(i) + }) + ); + const isReservedPrefix = (key) => key === "_" || key === "$"; + const hasSetupBinding = (state, key) => state !== EMPTY_OBJ && !state.__isScriptSetup && hasOwn(state, key); + const PublicInstanceProxyHandlers = { + get({ _: instance }, key) { + const { ctx, setupState, data, props, accessCache, type, appContext } = instance; + if (key === "__isVue") { + return true; + } + let normalizedProps; + if (key[0] !== "$") { + const n = accessCache[key]; + if (n !== void 0) { + switch (n) { + case 1 /* SETUP */: + return setupState[key]; + case 2 /* DATA */: + return data[key]; + case 4 /* CONTEXT */: + return ctx[key]; + case 3 /* PROPS */: + return props[key]; + } + } else if (hasSetupBinding(setupState, key)) { + accessCache[key] = 1 /* SETUP */; + return setupState[key]; + } else if (data !== EMPTY_OBJ && hasOwn(data, key)) { + accessCache[key] = 2 /* DATA */; + return data[key]; + } else if ( + // only cache other properties when instance has declared (thus stable) + // props + (normalizedProps = instance.propsOptions[0]) && hasOwn(normalizedProps, key) + ) { + accessCache[key] = 3 /* PROPS */; + return props[key]; + } else if (ctx !== EMPTY_OBJ && hasOwn(ctx, key)) { + accessCache[key] = 4 /* CONTEXT */; + return ctx[key]; + } else if (shouldCacheAccess) { + accessCache[key] = 0 /* OTHER */; + } + } + const publicGetter = publicPropertiesMap[key]; + let cssModule, globalProperties; + if (publicGetter) { + if (key === "$attrs") { + track(instance, "get", key); + markAttrsAccessed(); + } else if (key === "$slots") { + track(instance, "get", key); + } + return publicGetter(instance); + } else if ( + // css module (injected by vue-loader) + (cssModule = type.__cssModules) && (cssModule = cssModule[key]) + ) { + return cssModule; + } else if (ctx !== EMPTY_OBJ && hasOwn(ctx, key)) { + accessCache[key] = 4 /* CONTEXT */; + return ctx[key]; + } else if ( + // global properties + globalProperties = appContext.config.globalProperties, hasOwn(globalProperties, key) + ) { + { + return globalProperties[key]; + } + } else if (currentRenderingInstance && (!isString(key) || // #1091 avoid internal isRef/isVNode checks on component instance leading + // to infinite warning loop + key.indexOf("__v") !== 0)) { + if (data !== EMPTY_OBJ && 
isReservedPrefix(key[0]) && hasOwn(data, key)) { + warn( + `Property ${JSON.stringify( + key + )} must be accessed via $data because it starts with a reserved character ("$" or "_") and is not proxied on the render context.` + ); + } else if (instance === currentRenderingInstance) { + warn( + `Property ${JSON.stringify(key)} was accessed during render but is not defined on instance.` + ); + } + } + }, + set({ _: instance }, key, value) { + const { data, setupState, ctx } = instance; + if (hasSetupBinding(setupState, key)) { + setupState[key] = value; + return true; + } else if (setupState.__isScriptSetup && hasOwn(setupState, key)) { + warn(`Cannot mutate + + {{ super() }} + +{% endblock %} diff --git a/custom/k2.html b/custom/k2.html new file mode 100644 index 0000000..6609fe8 --- /dev/null +++ b/custom/k2.html @@ -0,0 +1,252 @@ +{% extends "main.html" %} + +{% block styles %} +{{ super() }} + + + + +{% endblock %} + +{% block content %} + {% if config.theme.language == 'en' %} +

Installation

+ {% else %} +

安装

+ {% endif %} +
+ +
+
+ {% if config.theme.language == 'en' %} + Build + {% else %} + 版本 + {% endif %} +
+
+
+
[[ item ]] +
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + OS + {% else %} + 操作系统 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ Pytorch +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Platform + {% else %} + 平台 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Install + {% else %} + 安装方法 + {% endif %} +
+
+
[[ install ]]
+
+
+ +
+ + {{ super() }} +{% endblock %} + +{% block scripts %} + {{ super() }} + + + + + + + +{% endblock %} diff --git a/custom/main.html b/custom/main.html new file mode 100644 index 0000000..0201271 --- /dev/null +++ b/custom/main.html @@ -0,0 +1,21 @@ +{% extends "base.html" %} +{% block extrahead %} + +{% endblock %} + +{% block content %} + +{% if page.nb_url %} + + {% include ".icons/material/download.svg" %} + +{% endif %} + + +{{ super() }} + +{% endblock content %} \ No newline at end of file diff --git a/custom/message.html b/custom/message.html new file mode 100644 index 0000000..ea39d18 --- /dev/null +++ b/custom/message.html @@ -0,0 +1,26 @@ +{% extends "main.html" %} + +{% block content %} + {{ super() }} + +

留言板

+ + + + + + + + +
+ + +{% endblock %} diff --git a/custom/models.html b/custom/models.html new file mode 100644 index 0000000..28a47fe --- /dev/null +++ b/custom/models.html @@ -0,0 +1,335 @@ +{% extends "main.html" %} + +{% block styles %} +{{ super() }} + + + + + +{% endblock %} + +{% block content %} + {% if config.theme.language == 'en' %} +

Select a model

+ {% else %} +

选择模型

+ {% endif %} + + +
+ +
+
+ {% if config.theme.language == 'en' %} + Language + {% else %} + 语言 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Encoder + {% else %} + 声学模型 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Type + {% else %} + 类型 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Engine + {% else %} + 推理引擎 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Mode + {% else %} + 模式 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Version + {% else %} + 版本 + {% endif %} +
+
+
+
[[ item ]]
+
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Download + {% else %} + 下载地址 + {% endif %} +
+
+
[[ download ]]
+
+
+ +
+
+ {% if config.theme.language == 'en' %} + Description + {% else %} + 模型简介 + {% endif %} +
+
+
[[ description ]]
+
+
+ +
+ + {{ super() }} +{% endblock %} + +{% block scripts %} + {{ super() }} + + + + + + +{% endblock %} diff --git a/custom/partials/comments.html b/custom/partials/comments.html new file mode 100644 index 0000000..381e5e3 --- /dev/null +++ b/custom/partials/comments.html @@ -0,0 +1,101 @@ +{% if page.meta.comments %} + +

{{ lang.t("meta.comments") }}

+ + {% if config.theme.language == 'zh' %} +
+ 如果您通过 github 登录评论有困难,可以在留言板把您的问题告诉我们。
+ 您也可以加入我们的微信QQ群与广大开发者一起交流,也欢迎大家关注我们的微信公众号。 +
+ {% endif %} + + + + {% if config.theme.language == 'en' %} + + {% else %} + + {% endif %} +{% endif %} diff --git a/custom/resources.html b/custom/resources.html new file mode 100644 index 0000000..912faf7 --- /dev/null +++ b/custom/resources.html @@ -0,0 +1,71 @@ + + {% extends "main.html" %} + + {% block styles %} + {{ super() }} + + {% endblock %} + + {% block content %} + {{ super() }} +
+ + + + + +
+
+ + {% endblock %} + + {% block scripts %} + {{ super() }} + + + + {% endblock %} \ No newline at end of file diff --git a/docs-zh-CN/blog/.authors.yml b/docs-zh-CN/blog/.authors.yml new file mode 100644 index 0000000..e523480 --- /dev/null +++ b/docs-zh-CN/blog/.authors.yml @@ -0,0 +1,11 @@ +authors: + pkufool: + name: pkufool + description: Xiaomi AI Lab + avatar: https://avatars.githubusercontent.com/u/11765074?v=4 + url: https://pkufool.github.io/ + yaozengwei: + name: yaozengwei + description: Xiaomi AI Lab + avatar: https://avatars.githubusercontent.com/u/19564185?v=4 + url: https://scholar.google.com/citations?user=f3Eo9S0AAAAJ&hl=zh-CN \ No newline at end of file diff --git a/docs-zh-CN/blog/index.md b/docs-zh-CN/blog/index.md new file mode 100644 index 0000000..e69de29 diff --git a/docs-zh-CN/blog/posts/homepage.md b/docs-zh-CN/blog/posts/homepage.md new file mode 100644 index 0000000..9830984 --- /dev/null +++ b/docs-zh-CN/blog/posts/homepage.md @@ -0,0 +1,69 @@ +--- +title: 新一代 Kaldi 主页发布 +date: 2024-04-01 +categories: + - Document +comments: true +authors: + - pkufool +slug: homepage +--- + +与 Kaldi 是个单一项目不同,新一代 Kaldi 是一个项目集合,整个大项目中分成了很多负责不同子功能的小项目,各个小项目都相对独立且有自己的文档系统,这就给开发者带来了一些麻烦,很多开发者经常困惑各个子项目之间是什么关系? 他们如何分工? 该怎么最快找到自己需要的文档? 为此,我们开发了一个简单的大项目主页,这个主页不是为了替代各子项目的文档,而是希望它成为整个大项目的总入口,方便大家最快找到自己需要的东西。 + + + +整个主页主要包含以下几个部分,下面做稍微详细的介绍。 + +## 首页 + +首页目前主要是罗列了新一代 Kaldi 目前发布的主要项目和对应简介。~~拉到下面看,上面部分看字就行,搞成这样纯粹为了好看~~ + +[![](../../assets/images/homepage/main.jpg)](../../index.md) + +## 快速开始 + +快速开始页是各个项目和任务的开始指引,我们希望用户可以从这里快速找到开始一个项目/任务的路径和文档。每个项目的简单介绍和安装方法也会放在此处。 + +[![](../../assets/images/homepage/get_started.jpg)](../../get-started/index.md) + +## 模型 + +模型页会有我们发布的一系列预训练模型,开发者们最常问的问题就是我该用哪个模型,我们希望这个页面能够回答大多数这样的问题。 + +[![](../../assets/images/homepage/model.jpg)](../../models/asr.md) + +## 演示 + +演示也是大家常说的 Demo, 这个页面会列出各个任务我们精心制作的 Demo,包括但不限于视频、APK、exe、huggingface space 等等。 + +[![](../../assets/images/homepage/demo.jpg)](../../demos/asr.md) + +## 重要事件 + +新一代 Kaldi 项目众多,有些开发者可能没有订阅 github,就算订阅了,消息太多估计也看不过来,所以这个页面,我们会列出项目的一些重要更新和发布的一些新特性。 + +[![](../../assets/images/homepage/event.jpg)](../../events.md) + +## 论文 + +这个最好理解,就是团队发表的论文列表。 + +[![](../../assets/images/homepage/papers.jpg)](../../publications.md) + +## 资源文件 + +资源文件页是给用户提供的一个快捷搜索入口,新一代 Kaldi 有很多项目,每个项目都会发布预训练模型,Demo 等等,这个页面将几乎左右的资源都汇总了,用户可以在页面上通过搜索的方式快速获得对应的链接,目前支持常规的字符串搜索和简单的正则支持。 + +[![](../../assets/images/homepage/resources.jpg)](../../resources.md) + +## 博客 + +博客栏目是我们不定期发布的一些技术解读和各种希望与开发者分享的材料,后面我们也会把微信公众号的文章在这里同步发表。 + +[![](../../assets/images/homepage/blog.jpg)](../../blog/index.md) + +## 留言板 + +其实我们各个页面下面都有对应的评论,项目的评论系统是基于 github 的,选择 github 也是为了便于管理,但考虑到中国大陆有些同学登 github 比较痛苦,所以中文的留言板我们给大家留了一个后门,可以不用登录即可留言,当然,我们不建议大家使用这个留言板,有条件还是使用 github 的评论系统,这样管理和追踪评论都比较方便。 +[![](../../assets/images/homepage/message.jpg)](../../message.md) \ No newline at end of file diff --git a/docs-zh-CN/blog/posts/zipformer.md b/docs-zh-CN/blog/posts/zipformer.md new file mode 100644 index 0000000..798561d --- /dev/null +++ b/docs-zh-CN/blog/posts/zipformer.md @@ -0,0 +1,278 @@ +--- +title: Zipformer 模型详解 +date: 2023-12-15 +categories: + - encoder +comments: true +authors: + - yaozengwei + - pkufool +slug: zipformer-details +--- + +## 摘要 + +[Zipformer](https://arxiv.org/pdf/2310.11230) 是新一代 kaldi 团队最新研发的序列建模模型。相比较于 [Conformer](https://arxiv.org/abs/2005.08100)、[Squeezeformer](https://arxiv.org/abs/2206.00888)、[E-Branchformer](https://arxiv.org/abs/2210.00077) 等主流 ASR 模型,Zipformer 具有 **效果更好、计算更快、更省内存** 等优点。Zipformer 在 LibriSpeech、Aishell-1 和 
WenetSpeech 等常用数据集上取得了 **当前最好** 的 ASR 结果。 + + + +本期文章将解析 Zipformer 的具体创新点,主要包括: +- 高效的模型结构:**Downsampled encoder structure** 和 **Zipformer block** +- 新 normalization:**BiasNorm** +- 新激活函数:**Swoosh** +- 新优化器:**ScaledAdam** 优化器 +- 激活值限制:**Balancer** 和 **Whitener** + +> 建议读者阅读论文了解更多细节:[https://arxiv.org/pdf/2310.11230.pdf](https://arxiv.org/pdf/2310.11230.pdf) + +## 方法 + +### 1. Downsampled encoder structure + +图 1 展示了 Zipformer 总体框架图,由一个 Conv-Embed 模块和多个 encoder stack 组成。**不同于 Conformer 只在一个固定的帧率 25Hz 操作,Zipformer 采用了一个类似于 U-Net 的结构,在不同帧率上学习不同时间分辨率的时域表征。** + +![图1:Zipformer 总体框架](../../assets/images/zipformer/98ff57ea-a436-4636-80d4-dd66b271a194.png) + +首先,Conv-Embed 将输入的 100Hz 的声学特征下采样为 50 Hz 的特征序列;然后,由 6 个连续的 encoder stack 分别在 50Hz、25Hz、12.5Hz、6.25Hz、12.5Hz 和 25Hz 的采样率下进行时域建模。除了第一个 stack 外,其他的 stack 都采用了降采样的结构。在 stack 与 stack 之间,特征序列的采样率保持在 50Hz。不同的 stack 的 embedding 维度不同,中间stack 的 embedding 维度更大。每个 stack 的输出通过截断或者补零的操作,来对齐下一个 stack 的维度。Zipformer 最终输出的维度,取决于 embedding 维度最大的 stack。 + +对于降采样的 encoder stack,成对出现的 Downsample 和 Upsample 模块负责将特征长度对称地放缩。**我们采用几乎最简单的方法实现 Downsample 和 Upsample 模块**。例如,当降采样率为 2 时,Downsample 学习两个标量权重,用来将相邻的两帧加权求和了;Upsample 则只是简单地将每一帧复制为两帧。最后,通过一个 Bypass 模块,以一种可学习的方式结合 stack 的输入和输出。 + +### 2. Zipformer block + +Conformer block 由四个模块组成:feed-forward、Multi-Head Self-Attention (MHSA)、convolution、feed-forward。MHSA 模块通过两个步骤学习全局时域信息:**基于内积计算注意力权重,以及利用算好的注意力权重汇聚不同帧的信息。** 然而,MHSA 模块通常占据了大量的计算,因为以上两步操作的计算复杂度都是平方级别于序列长度的。因此,**我们将 MHSA 模块根据这两个步骤分解为两个独立的模块**:**Multi-Head Attention Weight (MHAW)** 和 **Self-Attention (SA)。** 这样一来,我们可以 **通过在一个 block 里面使用一个 MHAW 模块和两个 SA 模块,以高效的方式实现两次注意力建模**。此外,我们还提出了一个 **新的模块 Non-Linear Attention (NLA) ,充分利用已经算好的注意力权重,进行全局时域信息学习。** + +![图2:Zipformer block 结构图](../../assets/images/zipformer/e7955bcd-ce1a-4f1b-89f9-bc2e4b64fce8.png) + +图 2 展示了 Zipformer block 的结构图,其深度大约是 Conformer block 的两倍。**核心的思想是通过复用注意力权重来节省计算和内存。** 具体而言,block 输入先被送到 MHAW 模块计算注意力权重,并分享给 NLA 模块和两个 SA 模块使用。同时,block 输入也被送到 feed-forward 模块,后面接着 NLA 模块。接着是两个连续的模块组,每组包含 SA、convolution 和 feed-forward。最后,由一个 BiasNorm 模块来将 block 输出进行 normalize。除了普通的加法残差连接,每个 Zipformer block 还使用了两个 Bypass 模型,用于结合 block 输入和中间模块的输出,分别位于 block 的中间和尾部。**值得注意的是,我们并没有像常规的 Transformer 模型一样,对每个模块都使用 normalization layer 去周期性地调整激活值的范围,这得益于我们使用的 ScaledAdam 优化器可以为各个模型自动学习参数的 scale。** + +#### Non-Linear Attention + +图 3 展示了 NLA 模块的结构。类似于 SA 模块,**它利用 MHAW 模块计算好的注意力权重,沿着时间轴汇聚不同帧的向量。** 具体而言,它使用三个 linear 将输入转换为 A、B、C,每个的维度为输入维度的 3/4 倍。模块的输出为 $linear(A ⊙ attention(tanh(B) ⊙ C))$,⊙ 表示点乘,$attention$ 表示利用一个注意力头的权重对不同帧汇聚,$linear$ 负责恢复特征的维度。 + +![图 3:Non-Linear Attention 模块](../../assets/images/zipformer/bf07c0ce-b908-4b0f-97f1-b77204c4b5c6.png) + +#### Bypass + +Bypass 模块学习一个逐通道的权重 $\mathbf{c}$,结合模块输入 $\mathbf{x}$ 和模块输出 $\mathbf{y}$:$(1-\mathbf{c}) \odot \mathbf{x} + \mathbf{c} \odot \mathbf{y}$。我们发现,在训练早期通过约束 $\mathbf{c}$ 的最小值让模块接近 “straight-through” 有助于稳定模型训练。 + +### 3. BiasNorm + +Conformer 使用 [LayerNorm](https://arxiv.org/abs/1607.06450) 来 normalize 激活值,给定 $D$ 维的向量 $\mathbf{x}$。LayerNorm 的公式为: + +$$ +\mathrm{LayerNorm}(\mathbf{x}) = \frac{\mathbf{x} - \mathrm{E}[\mathbf{x}]}{\sqrt{\mathrm{Var}[\mathbf{x}]+ \epsilon}} \odot \boldsymbol\gamma + \boldsymbol\beta. 
+$$ + +LayerNorm 先计算均值 $\mathrm{E}[x]$ 和方差 $\mathrm{Var}[x]$,用于向量标准化,将向量长度调整调整为 $\sqrt{D}$ 。然后,利用逐通道的放缩因子 $\boldsymbol\gamma$ 和偏置 $\boldsymbol\beta$ 进行元素变换,这有助于调整不同模块对整个模型的相对贡献。 + +然而,我们观察到使用 LayerNorm 的 Conformer 模型存在着两种失败的情况:1)**有时候会将某个特征维度设置得非常大,例如 50 左右,我们认为这是模型在抵制 LayerNorm 完全消除长度的机制,这个非常大的数可以在 normalize 的过程中保留其他维度的一部分长度信息。** 2)**有些模块(例如 feed-forward 和 convolution)的输出值非常小,例如 1e-6。我们认为在模型开始训练的时候,还没有学到有用信息的模块被防缩因子 $\boldsymbol\gamma$ 通过接近 0 关闭了。如果放缩因子 $\boldsymbol\gamma$ 在 0 左右震荡,反向传播的梯度也会随之翻转,这样一来,模块很难学到有用的信息,因为这是一个难以跳出的局部鞍点。** + +为了解决上述问题,我们提出 BiasNorm 模块来替换 LayerNorm: + +$$ +\mathrm{BiasNorm}(\mathbf{x}) = \frac{\mathbf{x}}{\mathrm{RMS}[\mathbf{x} - \mathbf{b}]} \cdot \exp(\gamma) +$$ + +其中,$\mathbf{b}$ 是可学习的逐通道的 bias,$\gamma$ 是一个可学习的标量。首先,我们去除了减均值的操作,因为它没有必要,除非它接着一个非线性变换。 **$\mathbf{b}$ 充当一个非常大的数,用于在 normalize 的过程中保留向量的一部分长度信息,这样一来,模型就不需要牺牲一个额外的维度来做这个事情。** 这或许有助于模型量化,因为它可以减少离群点的出现。**由于 $\exp(\gamma)$ 一直是正数,避免了出现梯度方向翻转导致的某些模块无法学习的问题。** + +### 4. Swoosh 激活函数 + +Conformer 采用的激活函数为 [Swish](https://arxiv.org/abs/1710.05941v1),其公式为: + +$$ +\mathrm{Swish}(x) = x \cdot (1+\exp(-x))^{-1}. +$$ + +我们提出两个新的激活函数用于代替 Swish,分别称为 SwooshR 和 SwooshL: + +$$ + \begin{split} + \mathrm{SwooshR}(x) &= \log(1 + \exp(x-1)) - 0.08x - 0.313261687, \\ + \mathrm{SwooshL}(x) &= \log(1 + \exp(x-4)) - 0.08x - 0.035. + \end{split} +$$ + +在 SwooshR 函数中,偏移值 0.313261687 是为了让函数经过原点;在 SwooshL函数中,偏移量 0.035 是经过实验调过的,比让它精确经过原点的结果略好。 + +如图 4 所示,SwooshL 近似于 SwooshR 向右偏移得到的。“L” 和 “R” 表示两个与 x 轴的交点中哪个靠近或经过原点。**类似于 Swish,SwooshR 和 SwooshL 都是有下确界的并且非单调的。** **相比较于 Swish,最大的区别在于 SwooshR 和 Swoosh 对于负数部分有一个斜率,这个可以避免输入一直是负数以及避免 Adam-type 的更新量分母(二阶梯度动量)太小。** 当将 SwooshR 用在 Zipformer 各个模块中时,我们发现,**那些带残差的模块,** 例如 feed-forward 和 ConvNeXt,**倾向于在激活函数前面的线性层中学习一个绝对值较大的负数 bias,以学习一种 “normally-off” 的行为。** 因此,我们把 SwooshL 函数用在这些 “normally-off” 的模块中,把 SwooshR 用在其他的模块:convolution 和 Conv-Embed 剩下的部分。 + + +![图4:激活函数 Swish,SwooshR 和 SwooshL](../../assets/images/zipformer/e1a30198-6ee9-43e5-8b54-42c4e83b283a.png) + +### 5. ScaledAdam + +我们提出一个 [Adam 优化器](https://arxiv.org/abs/1412.6980) 的 **parameter-scale-invariant** 版本,称为 ScaledAdam,它可以加快模型收敛。一方面,**ScaledAdam 根据参数 scale 放缩参数更新量,来确保不同 scale 的参数相对变化一致**;另一方面,**ScaledAdam 显式学习参数的 scale,这相当于给了一个额外的放缩参数 scale 的梯度**。 + +令 $f(\boldsymbol\theta)$ 为我们想要优化的 loss 函数,它对参数 $\boldsymbol\theta$ 是可导的。在每个步骤 $t$,Adam 计算参数梯度 $\mathbf{g}_t = \nabla_{\boldsymbol\theta}f(\boldsymbol\theta_{t-1})$,并更新梯度的一阶动量 $\mathbf{m}_t = \beta_1 \cdot \mathbf{m}_{t-1} + (1-\beta_1) \cdot \mathbf{g}_t$ 和二阶动量 $\mathbf{v}_t = \beta_2 \cdot \mathbf{v}_{t-1} + (1-\beta_2) \cdot \mathbf{g}_t^2$,此处, $\beta_1, \beta_2 \in [0, 1) $ 表示控制动量更新的系数。Adam 在步骤 t 的参数更新量为: + +$$ +\boldsymbol\Delta_t = \alpha_t \cdot \frac{\sqrt{1-\beta_2^t}}{1-\beta_1^t} \cdot \frac{\mathbf{m}_t}{\sqrt{\mathbf{v}_t}+\epsilon}, +$$ + +式中,$\alpha_t$ 通常由外部的 LR schedule 控制,$\frac{\sqrt{1-\beta_2^t}}{1-\beta_1^t}$ 为偏置纠正项。尽管 Adam 对梯度 scale 是 invariant 的,但是我们认为它仍然存在两个问题:1)**更新量 $\boldsymbol\Delta_t$ 并没有考虑参数的 scale(标记为 $r_{t-1}$),对于参数的相对更新量 $\boldsymbol\Delta_t/r_{t-1}$ 而言,Adam 可能会导致对 scale 小的参数学习太快,或者对 scale 大的参数学习太慢。** 2)**我们很难直接学习参数的 scale,因为参数 scale 的大小变化方向是高维度的梯度向量中一个特别具体的方向。尤其是 scale 变小的方向更加难学,因为在优化的过程中,梯度会引入噪声,参数的 scale 会倾向于不断增大。** + +#### Scaling update + +为了确保不同 scale 的参数的相对变化量 $\boldsymbol\Delta_t/r_{t-1}$ 一致,我们**在参数更新量中引入参数的 scale,来放缩更新量**: + +$$ +\boldsymbol\Delta_t' = \alpha_t \cdot r_{t-1} \cdot \frac{\sqrt{1-\beta_2^t}}{1-\beta_1^t} \cdot \frac{\mathbf{m}_t}{\sqrt{\mathbf{v}_t}+\epsilon}. 
+$$ + +我们计算 $\mathrm{RMS}[\boldsymbol\theta_{t-1}]$ 作为参数的 scale $r_{t-1}$。由于 ScaledAdam 比 Adam 更不容易发散,我们使用一个不需要很长 warm-up 的 LR schedule,称为 Eden;我们使用明显更大的学习率,因为参数的 RMS 值通常小于 1。 + +#### Learning parameter scale + +**为了显式学习参数的 scale,我们在将它当作一个真的存在的参数一样学习,仿佛我们将每个参数分解为 $ \boldsymbol\theta = r \cdot \boldsymbol\theta'$,并且我们是对参数 scale $r$ 和内部参数 $\boldsymbol\theta'$ 进行梯度下降。** 值得注意的是,**在具体实现中,我们并没有将每个参数进行分解,只是增加了一个额外的更新参数 scale 的梯度。** + +令 $h$ 为参数 scale 的梯度,存在 $h_t = \nabla_{r}f(\boldsymbol\theta_{t-1}) = \mathbf{g}_t \cdot \boldsymbol\theta_{t-1}'$。 +由于 Adam 对梯度的 scale 几乎是 invariant 的,我们可以计算 $h_t = \mathbf{g}_t \cdot (r_{t-1} \odot \boldsymbol\theta_{t-1}') = \mathbf{g}_t \cdot \boldsymbol\theta_{t-1}$。按照 Adam 算法,我们维护参数 scale 梯度 $h_t$ 的一阶动量 $n_t = \beta_1 \cdot n_{t-1} + (1-\beta_1) \cdot h_t$ 和二阶动量 $w_t = \beta_2 \cdot w_{t-1} + (1-\beta_2) \cdot h_t^2$。将参数 scale 从 $r_{t-1}$ 更新到 $r_{t}$ 对参数 $ \boldsymbol\theta$ 带来的变化为 $\boldsymbol\Delta_{t,r}' = (r_t - r_{t-1}) \odot \boldsymbol\theta_{t-1}'$。同样地,我们放缩参数 scale 对应的更新量: + +$$ +\begin{split} +\boldsymbol\Delta_{t,r}' +&= \eta \cdot \alpha_t \cdot r_{t-1} \cdot \frac{\sqrt{1-\beta_2^t}}{1-\beta_1^t} \cdot \frac{n_t}{\sqrt{w_t}+\epsilon} \odot \boldsymbol\theta_{t-1}' \\ +&= \eta \cdot \alpha_t \cdot \frac{\sqrt{1-\beta_2^t}}{1-\beta_1^t} \cdot \frac{n_t}{\sqrt{w_t}+\epsilon} \odot \boldsymbol\theta_{t-1}. +\end{split} +$$ + +式中, $\eta$ 用于放缩学习率,我们发现设置为 0.1 有助于稳定训练。此时,**参数的更新量由 $\boldsymbol\Delta_{t}'$ 变为 $\boldsymbol\Delta_{t,r}' + \boldsymbol\Delta_{t}'$,这等价于额外引入一个放缩参数 scale 的梯度。** 这个改动有助于我们简化模型结构,我们可以去掉大部分的 normalization 层,因此每个模块可以更容易得学习参数 scale ,来将激活值调整到一个合适的范围。 + +#### Eden schedule + +Eden schedule 的公式如下: + +$$ +\alpha_t = \alpha_{\mathrm{base}} \cdot \left(\frac{t^2 + \alpha_{\mathrm{step}}^2}{\alpha_{\mathrm{step}}^2}\right)^{-0.25} \cdot \left(\frac{e^2 + \alpha_{\mathrm{epoch}}^2}{\alpha_{\mathrm{epoch}}^2}\right)^{-0.25} \cdot \mathrm{linear}(\alpha_{\mathrm{start}}, t_{\mathrm{warmup}}, t). +$$ + +式中,$t$ 为 step,$e$ 为 epoch,$\alpha_{\mathrm{step}}$ 和 $ \alpha_{\mathrm{epoch}}$ 分别控制学习率在哪个 step 和 epoch 开始快速下降,$\mathrm{linear}(\alpha_{\mathrm{start}}, \alpha_{\mathrm{warmup}}, t)$ 表示一个线性 warmup,起点为 $\alpha_{\mathrm{start}}$,经过 $\alpha_{\mathrm{warmup}}$ 个 step 变为 1。$\alpha_{\mathrm{base}}$ 表示当没有 warmup 的情况下学习率的最大值。**让 Eden 同时依赖于 step 和 epoch 两个变量,是为了让模型的更新程度在经过一定的训练数据量(e.g., 1h)时,几乎不受 batch size 影响。** Eden 公式中,epoch 也可以替换为其他合适的变量,如经过多少小时的数据。 + +#### Efficient implementation + +**为了加快 ScaledAdam 计算,我们将参数根据 shape 分组,按照 batch 进行参数更新,** 值得注意的是这并不影响结果。Scaleadam 需要的内存使用量和 Adam 差不多,只需要额外的内存来存储参数 scale 对应梯度的一阶动量和二阶动量 $n_t$ 和 $w_t$。 + + +### 6. 
激活值限制 + +**为了确保训练的一致性以及避免训练出性能差的模型,我们提出 Balancer 和 Whitener,用于约束模型的激活值。** Balancer 和 Whitener 以一种省内存的方式实现:在前向过程中,相当于是一个 no-op;**在反向过程中,计算一个对激活值施加限制的损失函数的梯度 $\mathbf{g}'$,加到原本的激活值梯度 $\mathbf{g}$ 上:$\mathbf{g} = \mathbf{g} + \mathbf{g}'$。** Balancer 和 Whitener 的应用位置没有遵循一个明确的规则,我们一般是在模型表现不好的时候,通过分析模型哪个地方出现问题,再对应地使用 Balancer 和 Whitener 去修复模型。 + +#### Balancer + +在每个特征通道上的分布上,我们观察到 **两种失败的模式:** 1) **有时候值的范围太大或太小,这个可能导致训练不稳定,尤其是使用半精度训练的时候。** 2) **如果我们看 feed-forward 模型中激活函数前面的线性层的神经元,很多的值是负数,这个造成了参数浪费。** + +Balancer 通过对激活值施加限制:**最小和最大平均绝对值,** 分别标记为 $a_{\mathrm{min}}$ 和 $a_{\mathrm{max}}$;**最小和最大正数比例,** 分别标记为 $p_{\mathrm{min}}$ 和 $p_{\mathrm{max}}$。由于正数比例是不可导的,我们将限制转化为 standard-deviation-normalized mean $\mathrm{E/\sqrt{Var}}$:$f_{\mathrm{pos}\rightarrow\mathrm{E/\sqrt{Var}}}(x) = \mathrm{arctanh}(2x - 1) / (\sqrt{\pi}\cdot\log2)$,得到 $\mu_{\mathrm{min}} = f_{\mathrm{pos}\rightarrow\mathrm{E/\sqrt{Var}}}(p_{\mathrm{min}})$ 和 $\mu_{\mathrm{max}} = f_{\mathrm{pos}\rightarrow\mathrm{E/\sqrt{Var}}}(p_{\mathrm{max}})$。同时,我们将平均绝对值转化为 RMS:$f_{\mathrm{abs}\rightarrow\mathrm{RMS}}(x) = \sqrt{\pi/2} \cdot x$,得到 $r_{\mathrm{min}} = f_{\mathrm{abs}\rightarrow\mathrm{RMS}}(a_{\mathrm{min}})$ 和 $r_{\mathrm{max}} = f_{\mathrm{abs}\rightarrow\mathrm{RMS}}(a_{\mathrm{max}})$。具体而言,对于激活值 $\mathbf{x}$,限制函数定义为: + +$$ + \begin{split} + \mathcal{L}_{\mathrm{RMS}} &= |\log(\min(\max(\mathrm{RMS}[\mathbf{x}], r_{\mathrm{max}}),r_{\mathrm{min}})/\mathrm{RMS}[\mathbf{x}] )|, \\ + \mathcal{L}_{\mathrm{E/\sqrt{Var}}} &= | \mathrm{E}[\mathbf{x}] / \sqrt{\mathrm{Var}[\mathbf{x}]} - \mathrm{clamp}(\mathrm{E}[\mathbf{x}] / \sqrt{\mathrm{Var}[\mathbf{x}]}, \mu_{\mathrm{min}}, \mu_{\mathrm{max}})|, \\ + \mathcal{L}_{\mathrm{balancer}} &= \mathcal{L}_{\mathrm{RMS}} + \mathcal{L}_{\mathrm{E/\sqrt{Var}}}, + \end{split} +$$ + +式中,$\mathrm{RMS}[\mathbf{x}]$,$\mathrm{E}[\mathbf{x}]$ 和 $\sqrt{\mathrm{Var}[\mathbf{x}]}$ 为每个通道的统计量。 + +#### Whitener + +激活值的 **另一种失败的模式** 是: **协方差矩阵的特征值中,有一个或者少数几个特征值占据主导,剩下的特征值都特别小。这个现象通常发生在即将训练奔溃的模型中。** + +**Whitener 模块旨在通过限制协方差矩阵的特征值尽可能相同,来鼓励模块学习更有信息量的输出分布。** 具体而言,对于特征 $\mathbf{x} \in \mathcal{R}^{N \times D}$,我们计算协方差矩阵 $C = (\mathbf{x} - \mathrm{E}[\mathbf{x}])^T(\mathbf{x} - \mathrm{E}[\mathbf{x}])$,$\mathrm{E}[\mathbf{x}]$ 为各个通道的均值。Whitener 定义的限制函数为: + +$$ + \begin{split} + \mathcal{L}_{\mathrm{whitener}} &= (\sum_i \lambda_i^2/D) / (\sum_i \lambda_i/D)^2 \\ + &= +(\sum_{i}\sum_{j}C_{i,j}^2/D) / (\sum_{i}C_{i,i}/D)^2, \\ + \end{split} +$$ + +式中,$\boldsymbol\lambda=\{\lambda_1, \dots, \lambda_D\}$ 为协方差矩阵的特征值。 + +## 实验 + +### 1. 
实验设置 + +#### Architecture variants + +我们构建了三个不同参数规模的 Zipformer 模型:small (Zipformer-S), medium (Zipformer-M),large (Zipformer-L)。对于 Zipformer 的 6 个 stack,注意力头的数量为 {4,4,4,8,4,4},卷积核大小为 {31,31,15,15,15,31}。对于每个注意力头,query/key 维度为 32,value 维度为 12。我们通过调节 encoder embedding dim,层的数量,feed-forward hidden dim 来得到不同参数规模的模型: + +![表 1:不同规模 Zipformer 的参数配置](../../assets/images/zipformer/552dae6e-b5ad-43e3-be81-3d140c0d65d7.png) + +#### 数据集 + +我们在三个常用的数据集上进行实验:1)[Librispeech](https://danielpovey.com/files/2015_icassp_librispeech.pdf),1000 小时英文数据;2)[Aishell-1](https://arxiv.org/abs/1709.05522),170 小时中文;3)[WenetSpeech](https://arxiv.org/abs/2110.03370),10000+ 小时中文数据。 + +#### 实现细节 + +我们通过 Speed perturb 对数据进行三倍增广,使用 [Pruned transducer](https://arxiv.org/abs/2206.13236) 作为 loss 训练模型,解码方法为 [modified-beam-search](https://arxiv.org/abs/2211.00484)(每帧最多吐一个字,beam size=4)。 + +默认情况下,我们所有的 Zipformer 模型是在 32GB NVIDIA Tesla V100 GPU 上训练。对于 LibriSpeech 数据集,Zipformer-M 和 Zipformer-L 在 4 个 GPU 上训练了 50 epoch,Zipformer-S 在 2 个 GPU 上训练了 50 个 epoch;对于 Aishell-1 数据集,所有 Zipformer 模型都在 2 个 GPU 上训练了 56 epoch;对于 WenetSpeech 数据集,所有 Zipformer 模型都在 4 个 GPU 上训练了 14 epoch。 + +### 2. 与 SOTA 模型比较 + +#### LibriSpeech + +表 2 展示了 Zipformer 和其他 SOTA 模型在 LibriSpeech 数据集上的结果。对于 Conformer,我们还列出了我们复现的结果以及其他框架复现的结果。值得注意的是,这些结果和 Conformer 原文仍然存在一定的差距。Zipformer-S 取得了比所有的 Squeezeformer 模型更低的 WER,而参数量和 FLOPs 都更少。Zipformer-L的性能显著超过 Squeezeformer-L,Branchformer 和 我们复现的 Conformer,而 FLOPs 却节省了 50% 以上。值得注意的是,当我们在 8 个 80G NVIDIA Tesla A100 GPU 上训练 170 epoch,Zipformer-L 取得了 2.00%/4.38% 的 WER,这是我们了解到的迄今为止第一个和 Conformer 原文结果相当的模型。 + +![表2:不同模型在 LibriSpeech 数据集的比较](../../assets/images/zipformer/47cae332-1c36-4b48-9108-b8b43b052d33.png) + +我们还比较了 Zipformer 和其他 SOTA 模型的计算效率和内存使用。图 5 展示了不同 encoder 在单个 NVIDIA Tesla V100 GPU 上推理 30 秒长的语音 batch 所需的平均计算时间和峰值内存使用量,batch size 设置为 30,确保所有的模型都不会 OOM。总的来说,与其他的 SOTA 模型比较,Zipformer 在性能和效率上取得了明显更好的 trade-off。尤其是 Zipformer-L,计算速度和内存使用显著优于其他类似参数规模的模型。 + +> 此外,我们在论文附录中也展示了 Zipformer 在 CTC 和 CTC/AED 系统中的性能,同样超过了 SOTA 模型。 +> CTC/AED 的代码在 https://github.com/k2-fsa/icefall/pull/1389。 + +![图 5:不同模型的计算速度和内存使用比较](../../assets/images/zipformer/6f00094f-8afd-478e-8790-94c449866659.png) + +#### Aishell-1 + +表 3 展示了不同模型在 Aishell-1 数据集的结果。相比较于 [ESPnet 框架](https://github.com/espnet/espnet) 实现的 Conformer,Zipformer-S 性能更好,参数更少。增大参数规模后,Zipformer-M 和 Zipformer-L 都超过了其他所有的模型。 + +![表 3:不同模型在 Aishell-1 数据集的比较](../../assets/images/zipformer/87b2acaa-d54d-45bb-bd61-add1526b1714.png) + +#### WenetSpeech + +表 4 展示了不同模型在 WenetSpeech 数据集的结果。Zipformer-M 和 Zipformer-L 都在 Test-Net 和 Test-Meeting 测试集上超过了其他所有的模型。Zipformer-S 的效果超过了 [ESPnet](https://github.com/espnet/espnet) 和 [Wenet](https://github.com/wenet-e2e/wenet) 实现的 Conformer,参数量却只有它们的 1/3。 + +![表 4:不同模型在 WenetSpeech 数据集的比较](../../assets/images/zipformer/4441dae6-840f-4a37-b5d6-e8bc69634255.png) + +### 3. 
消融实验 + +我们在 LibriSpeech 数据集上进行了一系列消融实验,验证每一个模块的有效性,实验结果如表 5 所示。 + +![表 5:Zipformer 消融实验](../../assets/images/zipformer/6c783964-2f7a-45cc-b457-079f530445c8.png) + +#### Encoder structure + +我们移除了 Zipformer 的 Downsampled encoder structure,类似于 Conformer 在 Conv-Embed 中使用 4 倍降采样,得到一个 12 层的模型,每层的 embedding dim 为 512。该模型在两个测试集上的 WER 都有所上升,这表明 Zipformer 中采用的 Downsampled encoder structure 并不会带来信息损失,反而以更少的参数达到更好的性能。 + +#### Block structure + +由于每个 Zipfomer block 含有两倍于 Conformer block 的模块数量,我们将每个 Zipformer block 替换为两个 Conformer block,这导致了在 test-other 上的 WER 上升了 0.16%,并且带来更多的参数量,这体现了 Zipformer block 的结构优势。移除 NLA 或者 Bypass 模块都导致了性能下降。对于移除了 NLA 的模型,当我们移除注意力共享机制,这并没有带来性能提升,反而会带来更多的参数和计算量。我们认为在 Zipformer block 中两个注意力模块学习到的注意力权重具有高度一致性,共享注意力权重并不会有损模型性能。 + +#### Normalization layer + +将 BiasNorm 替换为 LayerNorm 导致在 test-clean 和 test-other 两个测试集上 WER 分别上升了 0.08% 和 0.18%,这表明了 BiasNorm 相对于 LayerNorm 的优势,可以对输入向量保留一定程度的长度信息。 + +#### Activation function + +当给 Zipformer 所有的模块都是用 SwooshR 激活函数的时候,test-clean 和 test-other 两个测试集上 WER 分别上升了 0.11% 和 0.42%,这表明给那些学习 “normally-off” 行为的模块使用 SwooshL 激活函数的优势。给所有的模块使用 Swish 激活函数导致了更严重的性能损失,这体现了 SwooshR 相对于 Swish 的优势。 + +#### Optimizer + +当我们使用 Adam 来训练 ScaledAdam 的时候,我们必须给每个模块配一个 BiasNorm 来防止模型不收敛,因为 Adam 无法像 ScaledAdam 一样很好地学习参数 scale 来放缩激活值的大小。我们给两个优化器都尝试了不同的学习率 $\alpha_{\mathrm{base}}$:ScaledAdam(0.025, 0.035, 0.045, 0.055),Adam(2.5, 5.0, 7.5, 10.0)。我们给 Adam 使用与 [Conformer](https://arxiv.org/abs/2005.08100) 原文一样的 LR schedule:$\alpha_t = \alpha_{\mathrm{base}} \cdot 512^{-0.5} \cdot \min(t^{-0.5}, t \cdot 10000^{-1.5})$。图 6 分别展示了使用 ScaledAdam 和 Adam 的模型在不同 epoch 时的平均 WER,以及对应的学习率,我们将它们最好的结果分别展示在表 5 中。与 Adam 训练的模型相比较,使用 ScaledAdam 训练的模型在 test-clean 和 test-other 两个测试集上的 WER 分别下降了 0.17% 和 0.72%,同时 ScaledAdam 收敛更快、效果更好。 + +![图 6:ScaledAdam 与 Adam 的比较](../../assets/images/zipformer/8869d81c-e978-4194-8303-83cb9e687fe8.png) + + +#### Activation constraints + +如表 6 所示,我们将 Balancer 移除掉后并不会带来明显的性能变化,但是没有对激活值的范围作限制会增大模型不收敛的风险,尤其是在使用混合精度训练的时候。移除掉 Whitener 导致了在 test-clean 和 test-other 两个测试集上分别下降了 0.04% 和 0.24%,这表明通过限制激活值的协方差矩阵特征值尽可能相同,有助于让提升模型性能。 + +![表 6](../../assets/images/zipformer/3360bb95-49f4-47f5-a458-a6edc71081ff.png) + +## 结语 + +目前,除了论文中展示的 LibriSpeech、Aishell-1 和 WenetSpeech 数据集外,我们的实验表明, Zipformer 在其它较大规模的 ASR 数据集上同样取得了新的 SOTA 结果。例如在 10000 h 的英文数据集 [GigaSpeech](https://arxiv.org/abs/2106.06909) 上,不使用外部语言模型时,在 dev/test 测试集上,66M Zipformer-M 的 WER 为 10.25/10.38,288M Zipformer 的 WER 为 10.07/10.19。值得一提的是,我们的初步实验表明,Zipformer 的相关机制(如 ScaledAdam、SwooshL 等)在语言模型和视觉模型上同样展示了有效性。 + +如果大家在使用 Zipformer 的过程中遇到相关问题,欢迎通过 github issue 或微信群等多个渠道反馈讨论。 \ No newline at end of file diff --git a/docs-zh-CN/demos/asr.md b/docs-zh-CN/demos/asr.md new file mode 100644 index 0000000..b4638b2 --- /dev/null +++ b/docs-zh-CN/demos/asr.md @@ -0,0 +1,52 @@ +--- +comments: true +title: 语音识别演示 +--- + +新一代 Kaldi 不仅提供语音识别[模型训练](https://github.com/k2-fsa/icefall){:target="_blank"}和[部署](https://github.com/k2-fsa/sherpa-onnx){:target="_blank"}的方案,我们还发布了众多的预训练模型和相应的演示程序,供广大开发者体验学习。 + +## Huggingface space + +体验新一代 Kaldi 最直接最便捷的方式是用浏览器访问我们提供的 Huggingface space,目前支持包括中文、英文、中英文、中英粤、粤语、藏语、阿拉伯语、德语、法语、俄语等语言数十个个模型的体验。 + +体验地址[huggingface :hugging:](https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition){:target="_blank"}。 +> 对于大陆用户,如无法访问 huggingface,可以使用 [hf-mirror :hugging:](https://hf-mirror.com/spaces/k2-fsa/automatic-speech-recognition){:target="_blank"} 体验。 + +[![](../assets/images/asr_huggingface.png "新一代 Kaldi huggingface 
语音识别空间")](https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition){:target="_blank"} + +### Webassembly + +新一代 Kaldi 还提供了 [webassembly](https://webassembly.org/){:target="_blank"} 支持,将模型的推理和解码完全迁移到浏览器端,不需花费服务器的计算资源。我们提供了如下一些模型的体验地址,如果你想用 webassembly 打包自己的模型,可以参考 [sherpa-onnx 文档](https://k2-fsa.github.io/sherpa/onnx/wasm/index.html){:target="_blank"}和 [sherpa-ncnn 文档](https://k2-fsa.github.io/sherpa/ncnn/wasm/index.html){:target="_blank"}。 + +| 语言 | 声学编码器 | 引擎 | 体验地址 | 模型地址 | +| ------------ | ---------- | ----------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 英文 | zipformer | onnxruntime | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-en){:target="_blank"} [modelscope](https://modelscope.cn/studios/k2-fsa/web-assembly-asr-sherpa-onnx-en/summary){:target="_blank"} | [模型文档](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-transducer/zipformer-transducer-models.html#csukuangfj-sherpa-onnx-streaming-zipformer-en-2023-06-21-english){:target="_blank"} | +| 中英文 | zipformer | onnxruntime | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-zh-en){:target="_blank"} [modelscope](https://modelscope.cn/studios/k2-fsa/web-assembly-asr-sherpa-onnx-zh-en/summary){:target="_blank"} | [模型文档](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-transducer/zipformer-transducer-models.html#csukuangfj-sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20-bilingual-chinese-english){:target="_blank"} | +| 中英文 | paraformer | onnxruntime | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-zh-en-paraformer){:target="_blank"} [modelscope](https://modelscope.cn/studios/k2-fsa/web-assembly-asr-sherpa-onnx-zh-en-paraformer/summary){:target="_blank"} | [模型文档](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-paraformer/paraformer-models.html#csukuangfj-sherpa-onnx-streaming-paraformer-bilingual-zh-en-chinese-english){:target="_blank"} | +| 中英粤(方言) | paraformer | onnxruntime | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-zh-cantonese-en-paraformer){:target="_blank"} [modelscope](https://modelscope.cn/studios/k2-fsa/web-assembly-asr-sherpa-onnx-zh-cantonese-en-paraformer/summary){:target="_blank"} | [模型文档](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-paraformer/paraformer-models.html#csukuangfj-sherpa-onnx-streaming-paraformer-trilingual-zh-cantonese-en-chinese-cantonese-english){:target="_blank"} | +| 英文 | zipformer | ncnn | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-ncnn-en){:target="_blank"} [modelscope](https://modelscope.cn/studios/k2-fsa/web-assembly-asr-sherpa-ncnn-en/summary){:target="_blank"} | [模型文档](https://k2-fsa.github.io/sherpa/ncnn/pretrained_models/zipformer-transucer-models.html#csukuangfj-sherpa-ncnn-streaming-zipformer-en-2023-02-13-english){:target="_blank"} | +| 中英文 | zipformer | ncnn | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-ncnn-zh-en){:target="_blank"} 
[modelscope](https://modelscope.cn/studios/k2-fsa/web-assembly-asr-sherpa-ncnn-zh-en/summary){:target="_blank"} | [模型文档](https://k2-fsa.github.io/sherpa/ncnn/pretrained_models/zipformer-transucer-models.html#csukuangfj-sherpa-ncnn-streaming-zipformer-bilingual-zh-en-2023-02-13-bilingual-chinese-english){:target="_blank"} | + +### 视频字幕提取 + +字幕提取功能是我们基于语音识别制作的一个小工具,用户只需上传视频即可生成对应的字幕文件,中文、英文、中英文、俄语体验地址:[Huggingface :hugging:](https://huggingface.co/spaces/k2-fsa/generate-subtitles-for-videos){:target="_blank"}, [Huggingface 镜像站](https://hf-mirror.com/spaces/k2-fsa/generate-subtitles-for-videos){:target="_blank"}。 +演示视频见[:fontawesome-brands-bilibili: Bilibili](https://www.bilibili.com/video/BV19C4y1f7qb){:target="_blank"}。 + + +## 视频 + +为了便于大家快速看到演示的效果,我们还制作了很多视频供大家预览,请移步[:fontawesome-brands-bilibili: Bilibili](https://space.bilibili.com/1234519871/video?tid=0&special_type=&pn=1&keyword=&order=click){:target="_blank"}自行选择阅看。 + + +## apk & exe + +我们同样提供了一些编译好的安卓 APK 和 Windows 可执行程序,大家自行下载安装即可试用。 + +| 语言 | 引擎 | 平台 | 下载地址 | +| -------------------- | ----------- | ------- | ------------------------------------------------------------------------------------------------------ | +| 中文 | onnxruntime | 安卓 | [链接](../resources.md?s=onnx.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-zh.apk){:target="_blank"} | +| 英文 | onnxrumtime | 安卓 | [链接](../resources.md?s=onnx.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-en.apk){:target="_blank"} | +| 中英文 | onnxrumtime | 安卓 | [链接](../resources.md?s=onnx.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-bilingual.*apk){:target="_blank"} | +| 中英文 | ncnn | 安卓 | [链接](../resources.md?s=ncnn.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-bilingual.*apk){:target="_blank"} | +| 全语言(自行下载模型) | onnxruntime | Windows | [链接](../resources.md?s=asr.*exe){:target="_blank"} | +| 全语言(自行下载模型) | ncnn | Windows | [链接](../resources.md?s=ncnn.*exe){:target="_blank"} | \ No newline at end of file diff --git a/docs-zh-CN/demos/kws.md b/docs-zh-CN/demos/kws.md new file mode 100644 index 0000000..1052e0d --- /dev/null +++ b/docs-zh-CN/demos/kws.md @@ -0,0 +1,17 @@ +--- +comments: true +title: 关键词识别演示 +--- + +# 关键词检测 + +目前我们提供了安卓 APK 和 视频两种演示,详见下表。 + + +| 语言 | 类型 | 地址 | 如何复现 | +| ---- | ---- | ------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 中文 | 视频 | [:fontawesome-brands-bilibili: Bilibili](https://www.bilibili.com/video/BV1Nw411J7K6){:target="_blank"} | [模型](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-wenetspeech-20240219.tar.gz) [代码](https://github.com/k2-fsa/sherpa-onnx/tree/master/android/SherpaOnnxKws){:target="_blank"} | +| 中文 | apk | [arm64-v8a/armeabi-v7a/x86/x86_64](../resources.md?s=kws.*wenetspeech.*apk){:target="_blank"} | [模型](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-wenetspeech-20240219.tar.gz) [代码](https://github.com/k2-fsa/sherpa-onnx/tree/master/android/SherpaOnnxKws){:target="_blank"} | +| 英文 | apk | [arm64-v8a/armeabi-v7a/x86/x86_64](../resources.md?s=kws.*gigaspeech.*apk){:target="_blank"} | [模型](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-gigaspeech-20240219.tar.gz) 
[代码](https://github.com/k2-fsa/sherpa-onnx/tree/master/android/SherpaOnnxKws){:target="_blank"} | + +> 如果上述下载链接(尤其是 github/huggingface 链接)被墙无法下载,可使用我们的提供的[下载工具](https://r.kingway.fun/k2-sync/download){:target="_blank"}下载。 diff --git a/docs-zh-CN/demos/tts.md b/docs-zh-CN/demos/tts.md new file mode 100644 index 0000000..4cde58c --- /dev/null +++ b/docs-zh-CN/demos/tts.md @@ -0,0 +1,4 @@ +--- +comments: true +title: 语音合成演示 +--- diff --git a/docs-zh-CN/events.md b/docs-zh-CN/events.md new file mode 100644 index 0000000..8de0a66 --- /dev/null +++ b/docs-zh-CN/events.md @@ -0,0 +1,25 @@ +--- +comments: true +title: 新一代 Kaldi 历程 +--- + +## 2024 年 + +### 3月 + +#### icefall + +- 增加基于 openfst 的 CTC 流式 HLG 解码。[代码](https://github.com/k2-fsa/icefall/pull/1557){:target="_blank"} +- 增加广东话 recipe,zipformer 模型,使用 [MDCC](https://arxiv.org/pdf/2201.02419.pdf){:target="_blank"} 数据集。[代码](https://github.com/k2-fsa/icefall/pull/1537){:target="_blank"} [模型](https://huggingface.co/zrjin/icefall-asr-mdcc-zipformer-2024-03-11/){:target="_blank"} +- 增加使用 [LoRA](https://arxiv.org/abs/2106.09685){:target="_blank"} 微调的 recipe。[代码](https://github.com/k2-fsa/icefall/pull/1540){:target="_blank"} +- 增加使用 [adapter](https://arxiv.org/pdf/1902.00751.pdf){:target="_blank"} 微调的 recipe。[代码](https://github.com/k2-fsa/icefall/pull/1512){:target="_blank"} [文档](https://k2-fsa.github.io/icefall/recipes/Finetune/adapter/finetune_adapter.html){:target="_blank"} +- 增加使用预训练 zipformer 做微调的 recipe。[代码](https://github.com/k2-fsa/icefall/pull/1484){:target="_blank"} [文档](https://k2-fsa.github.io/icefall/recipes/Finetune/from_supervised/finetune_zipformer.html){:target="_blank"} +- 增加使用 wenetspeech 和 multi-zh-han 数据集微调 whisper 模型的 recipe。 [代码](https://github.com/k2-fsa/icefall/pull/1483){:target="_blank"} + +#### sherpa + +- sherpa-onnx 支持语言识别(spoken language identification), 使用 whisper 实现。 [huggingface space](https://huggingface.co/spaces/k2-fsa/spoken-language-identification){:target="_blank"} +- sherpa-onnx 支持 RISC-V 平台。 +- sherpa-onnx kws 支持 python、c API 和 webassembly。 +- sherpa-ncnn 支持 android wear demo。[代码](https://github.com/k2-fsa/sherpa-ncnn/pull/319){:target="_blank"} [演示视频](https://www.bilibili.com/video/BV1qS421w7cK/){:target="_blank"} +- sherap 增加 Whisper TensorRT-LLM 推理支持。[代码](https://github.com/k2-fsa/sherpa/pull/551){:target="_blank"} diff --git a/docs-zh-CN/get-started/icefall.md b/docs-zh-CN/get-started/icefall.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs-zh-CN/get-started/icefall.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs-zh-CN/get-started/index.md b/docs-zh-CN/get-started/index.md new file mode 100644 index 0000000..61e7e29 --- /dev/null +++ b/docs-zh-CN/get-started/index.md @@ -0,0 +1,22 @@ +--- +comments: true +--- + +# 新一代 Kaldi + +新一代 Kaldi 是一个开源的智能语音工具集,几乎涵盖了构建智能语音系统的方方面面。下图简单罗列了新一代 Kaldi 的项目矩阵,包括数据、训练到部署全链条。更多的项目见项目的 [github 主页](https://github.com/k2-fsa/){:target="_blank"}。 你也可以从这篇[旧文](https://mp.weixin.qq.com/s/f0vpatseghLi2piYpUpmQQ){:target="_blank"}中了解新一代 Kaldi 的起源与故事。 + +![](../assets/images/ngk-matrix.png) + +## 特性及功能 + +智能语音领域包含非常多的子任务和子领域,新一代 Kaldi 目前支持语音识别(ASR)、语音合成(TTS)、关键词检测(KWS)、话音检测(VAD)、说话人识别(Speaker identification)、语种识别(Spoken language identification) 等等。其中有些提供了包含训练和部署全链路的技术,有些是基于优秀的第三方开源库做的部署支持,具体细节如下所示: + +| 任务 | 训练 | 部署 | 相关文档 | +|----------|---------|---------|-------------| +| 语音识别(ASR) | :material-check: | :material-check: | [训练](./icefall.md) [部署](./sherpa/index.md) | +| 语音合成(TTS) | :material-check: | :material-check: | [训练](./sherpa/index.md) 
[部署](./sherpa/onnx.md) | +| 关键词(KWS) | :material-check: | :material-check: | [训练](./icefall.md) [部署](./sherpa/onnx.md) | +| 话音检测(VAD) | :material-close: | :material-check: | [部署](./sherpa/onnx.md) | +| 说话人识别(Speaker identification) | :material-close: | :material-check: | [部署](./sherpa/onnx.md) | +| 语种识别(Spoken language identification) | :material-close: | :material-check: | [部署](./sherpa/onnx.md) | diff --git a/docs-zh-CN/get-started/k2.md b/docs-zh-CN/get-started/k2.md new file mode 100644 index 0000000..1224017 --- /dev/null +++ b/docs-zh-CN/get-started/k2.md @@ -0,0 +1,24 @@ +--- +template: k2.html +comments: true +--- + +以上只列出了最新几个版本的安装方法,如您还有更高阶的需求,如从源码编译,请参考项目详细的[安装文档](https://k2-fsa.github.io/k2/installation/index.html){:target="_blank"}。 + +# k2 + +社区开发者们经常把新一代 Kaldi 统称为 k2 (解读为 Kaldi 第二代), 这里提到的 [k2](https://github.com/k2-fsa/k2){:target="_blank"} 是项目的其中一个子项目,可能也是最硬核的一个项目? + +## k2 是什么 + +总的来说,k2 是一个序列建模的算法集合。众所周知,语音识别有几个常用的建模方法,如 CTC、Transducer、MMI 等,k2 就是用来实现这些建模方法的。比如 k2 中有使用可微分有限状态自动机实现的 CTC 损失函数, 及 MMI 损失函数,也有高效的 Transducer 损失函数实现 [pruned rnnt](https://arxiv.org/pdf/2206.13236.pdf){:target="_blank"}。 + +纯粹从工程角度看,你也可以把 k2 看作一个 GPU 加速的 FST 库,比如 k2 中在 GPU 上实现的基于 FST 的 CTC 解码,Transducer 解码。 应用 k2 可把图的搜索过程也一并放到 GPU 执行,可有效利用 GPU 的并行能力加速基于 FST 的算法,无论是训练还是解码。 + +k2 中实现了一个非常通用的不规则矩阵的数据结构 [RaggedTensor](https://k2-fsa.github.io/k2/python_tutorials/ragged/basics.html){:target="_blank"} 并且提供了一套非常简单的编程范式,k2 基于这个数据结构实现了可微分有限状态自动机,这个数据结构也让其它开发者可以在不规则矩阵上实现他们自己的算法。 + +## 如何开始 + +如果只是使用新一代 Kaldi 工具训练或者部署模型,没有基于 k2 实现自己建模方法的计划,那么可以完全不用理会 k2 的细节,只需参照上面方法安装好软件,遵照相应项目(icefall 或者 sherpa)的样例和说明文档使用 k2 就行。 + +如果想进行一些高阶的定制,可以先了解 k2 的[核心概念](https://k2-fsa.github.io/k2/core_concepts/index.html){:target="_blank"},然后阅读代码学习原理,最终实现自己的目的。 diff --git a/docs-zh-CN/get-started/lhotse.md b/docs-zh-CN/get-started/lhotse.md new file mode 100644 index 0000000..684c236 --- /dev/null +++ b/docs-zh-CN/get-started/lhotse.md @@ -0,0 +1,5 @@ +--- +comments: true +--- + +# lhotse diff --git a/docs-zh-CN/get-started/sherpa/index.md b/docs-zh-CN/get-started/sherpa/index.md new file mode 100644 index 0000000..6df937c --- /dev/null +++ b/docs-zh-CN/get-started/sherpa/index.md @@ -0,0 +1,5 @@ +--- +comments: true +--- + +# sherpa \ No newline at end of file diff --git a/docs-zh-CN/get-started/sherpa/ncnn.md b/docs-zh-CN/get-started/sherpa/ncnn.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs-zh-CN/get-started/sherpa/ncnn.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs-zh-CN/get-started/sherpa/onnx.md b/docs-zh-CN/get-started/sherpa/onnx.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs-zh-CN/get-started/sherpa/onnx.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs-zh-CN/get-started/sherpa/torch.md b/docs-zh-CN/get-started/sherpa/torch.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs-zh-CN/get-started/sherpa/torch.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs-zh-CN/index.md b/docs-zh-CN/index.md new file mode 100644 index 0000000..bdd542f --- /dev/null +++ b/docs-zh-CN/index.md @@ -0,0 +1,5 @@ +--- +template: home.html +title: 新一代 Kaldi +--- + diff --git a/docs-zh-CN/message.md b/docs-zh-CN/message.md new file mode 100644 index 0000000..b5d6dd8 --- /dev/null +++ b/docs-zh-CN/message.md @@ -0,0 +1,10 @@ +--- +template: message.html +title: 新一代 Kaldi 留言板 +--- + +把你在使用新一代 Kaldi 过程中遇到的任何问题或者对新一代 Kaldi 
的意见和建议留言告诉我们!你也可以加入我们的[微信](../assets/pic/wechat_group.jpg){:target="_blank"}和[QQ](../assets/pic/qq_group.jpeg){:target="_blank"}社群和众多开发者一起交流学习!也欢迎大家关注我们的[微信公众号](../assets/pic/wechat_account.jpg){:target="_blank"}。 \ No newline at end of file diff --git a/docs-zh-CN/models/asr.md b/docs-zh-CN/models/asr.md new file mode 100644 index 0000000..9adccab --- /dev/null +++ b/docs-zh-CN/models/asr.md @@ -0,0 +1,20 @@ +--- +template: models.html +title: 语音识别模型 +comments: true +--- + +> 如果上述下载链接(尤其是 github/huggingface 链接)被墙无法下载,可使用我们的提供的[下载工具](https://r.kingway.fun/k2-sync/download){:target="_blank"}下载。 + +# 补充说明 + +以上只提供了部分使用人数较多的模型,更多的模型详见各个项目的文档: + +项目 | 推理引擎 | 文档链接 | +----|------|-----| +Sherpa | Libtorch | [Pre-trained models](https://k2-fsa.github.io/sherpa/sherpa/pretrained_models/index.html){:target="_blank"} | +Sherpa-onnx | onnxruntime | [Pre-trained models](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/index.html){:target="_blank"} | +Sherpa-ncnn | ncnn | [Pre-trained models](https://k2-fsa.github.io/sherpa/ncnn/pretrained_models/index.html){:target="_blank"} | +Icefall | Pytorch | 没有具体的文档,详见[项目](https://github.com/k2-fsa/icefall){:target="_blank"} 下面各个 Recipe 的 RESULTS.md 文件 | + +请根据自己需求 **仔细阅读文档**,筛选最适合自己的模型,如有不明白的地方可以评论告诉我们。 \ No newline at end of file diff --git a/docs-zh-CN/models/kws.md b/docs-zh-CN/models/kws.md new file mode 100644 index 0000000..4f7d0ec --- /dev/null +++ b/docs-zh-CN/models/kws.md @@ -0,0 +1,19 @@ +--- +title: 关键词检出模型 +comments: true +--- + +# 关键词检测模型 + +目前我们提供了中文和英文两个基础模型,都支持 Pytorch 和 onnxruntime 框架,Pytorch 模型主要用于微调,onnx 模型主要用于部署,可先使用 onnx 测试目标关键词的效果,如果达不到预期再考虑基于我们提供的基础模型微调。 + + +| 语言 | 推理框架 | 下载地址 | 使用方法 | 简介 | +|---------|-------------|------------|--------------|--------------| +| 中文 | Pytorch | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-wenetspeech-20240219.tar.gz) | [训练及微调方法](https://github.com/k2-fsa/icefall/pull/1428){:target="_blank"} | 该模型是基于 Wenetspeech 1万小时训练,模型参数约为 3.3M,拼音(声韵母)建模,可作为基础模型用于微调。| +| 中文 | onnxruntime | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.1/sherpa-onnx-kws-zipformer-wenetspeech-3.3M-2024-01-01.tar.bz) [modelscope](https://www.modelscope.cn/models/pkufool/sherpa-onnx-kws-zipformer-wenetspeech-3.3M-2024-01-01/summary){:target="_blank"} | [部署教程](https://k2-fsa.github.io/sherpa/onnx/kws/pretrained_models/index.html#sherpa-onnx-kws-zipformer-wenetspeech-3-3m-2024-01-01-chinese){:target="_blank"} | 此为上述基础模型导出的 onnx,可用于 [sherpa-onnx](https://github.com/k2-fsa/sherpa-onnx){:target="_blank"} 平台部署 | +| 英文 | Pytorch | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-gigaspeech-20240219.tar.gz) | [训练及微调方法](https://github.com/k2-fsa/icefall/pull/1428){:target="_blank"} | 该模型是基于 Gigaspeech 1万小时训练,模型参数约为 3.3M,BPE 建模,可作为基础模型用于微调。| +| 英文 | onnxruntime | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.1/sherpa-onnx-kws-zipformer-gigaspeech-3.3M-2024-01-01.tar.bz) [modelscope](https://www.modelscope.cn/models/pkufool/sherpa-onnx-kws-zipformer-gigaspeech-3.3M-2024-01-01/summary){:target="_blank"} | [部署教程](https://k2-fsa.github.io/sherpa/onnx/kws/pretrained_models/index.html#sherpa-onnx-kws-zipformer-gigaspeech-3-3m-2024-01-01-english){:target="_blank"} | 此为上述基础模型导出的 onnx,可用于 [sherpa-onnx](https://github.com/k2-fsa/sherpa-onnx){:target="_blank"} 平台部署 | + +> 如果上述下载链接(尤其是 github/huggingface 
链接)被墙无法下载,可使用我们的提供的[下载工具](https://r.kingway.fun/k2-sync/download){:target="_blank"}下载。 + diff --git a/docs-zh-CN/models/tts.md b/docs-zh-CN/models/tts.md new file mode 100644 index 0000000..30dce45 --- /dev/null +++ b/docs-zh-CN/models/tts.md @@ -0,0 +1,4 @@ +--- +title: 语音合成模型 +comments: true +--- diff --git a/docs-zh-CN/publications.md b/docs-zh-CN/publications.md new file mode 100644 index 0000000..7d7d16a --- /dev/null +++ b/docs-zh-CN/publications.md @@ -0,0 +1,24 @@ +--- +comments: true +title: 新一代 Kaldi 论文 +--- + +这里只列出了新一代 Kaldi 团队在研发新一代 Kaldi 过程中发表的论文,Daniel Povey 单独或者合作发表的论文可见其[主页](https://danielpovey.com/publications.html)或者[谷歌学术](https://scholar.google.com/citations?hl=zh-CN&user=y_-5FWAAAAAJ&view_op=list_works&sortby=pubdate)。 + +- "Zipformer: A faster and better encoder for automatic speech recognition", Zengwei Yao, Liyong Guo, Xiaoyu Yang, Wei Kang, Fangjun Kuang, Yifan Yang, Zengrui Jin, Long Lin, Daniel Povey, __ICLR 2024__ [[pdf]](https://arxiv.org/pdf/2310.11230.pdf) [[code]](https://github.com/k2-fsa/icefall/blob/master/egs/librispeech/ASR/zipformer/zipformer.py) [[解读]](./blog/posts/zipformer.md) + +- "Libriheavy: a 50,000 hours asr corpus with punctuation casing and context", Wei Kang, Xiaoyu Yang, Zengwei Yao, Fangjun Kuang, Yifan Yang, Liyong Guo, Long Lin, Daniel Povey, __ICASSP 2024__ [[pdf]](https://arxiv.org/pdf/2309.08105.pdf) [[code]](https://github.com/k2-fsa/libriheavy) + +- "PromptASR for contextualized ASR with controllable style", Xiaoyu Yang, Wei Kang, Zengwei Yao, Yifan Yang, Liyong Guo, Fangjun Kuang, Long Lin, Daniel Povey, __ICASSP 2024__ [[pdf]](https://arxiv.org/pdf/2309.07414.pdf) [[code]](https://github.com/k2-fsa/icefall/pull/1250) + +- "Delay-penalized transducer for low-latency streaming asr", Wei Kang, Zengwei Yao, Fangjun Kuang, Liyong Guo, Xiaoyu Yang, Long Lin, Piotr Żelasko, Daniel Povey, __ICASSP 2023__ [[pdf]](https://arxiv.org/pdf/2211.00490.pdf) [[code icefall]](https://github.com/k2-fsa/icefall/pull/654) [[code k2]](https://github.com/k2-fsa/k2/pull/976) + +- "Fast and parallel decoding for transducer", Wei Kang, Liyong Guo, Fangjun Kuang, Long Lin, Mingshuang Luo, Zengwei Yao, Xiaoyu Yang, Piotr Żelasko, Daniel Povey, __ICASSP 2023__ [[pdf]](https://arxiv.org/pdf/2211.00484.pdf) [[code icefall]](https://github.com/k2-fsa/icefall/pull/250) [[code k2]](https://github.com/k2-fsa/k2/pull/926) + +- "Predicting multi-codebook vector quantization indexes for knowledge distillation", Liyong Guo, Xiaoyu Yang, Quandong Wang, Yuxiang Kong, Zengwei Yao, Fan Cui, Fangjun Kuang, Wei Kang, Long Lin, Mingshuang Luo, Piotr Żelasko, Daniel Povey, __ICASSP 2023__ [[pdf]](https://arxiv.org/pdf/2211.00508.pdf) [[code icefall]](https://github.com/k2-fsa/icefall/pull/387) [[code]](https://github.com/k2-fsa/multi_quantization) + +- "Blank-regularized ctc for frame skipping in neural transducer", Yifan Yang, Xiaoyu Yang, Liyong Guo, Zengwei Yao, Wei Kang, Fangjun Kuang, Long Lin, Xie Chen, Daniel Povey, __Interspeech 2023__ [[pdf]](https://arxiv.org/pdf/2305.11558.pdf) [[code]](https://github.com/k2-fsa/icefall/pull/730) + +- "Delay-penalized CTC implemented based on Finite State Transducer", Zengwei Yao, Wei Kang, Fangjun Kuang, Liyong Guo, Xiaoyu Yang, Yifan Yang, Long Lin, Daniel Povey, __Interspeech 2023__ [[pdf]](https://arxiv.org/pdf/2305.11539.pdf) [[code]](https://github.com/k2-fsa/icefall/pull/669) + +- "Pruned RNN-T for fast, memory-efficient ASR training", Fangjun Kuang, Liyong Guo, Wei Kang, Long Lin, Mingshuang Luo, Zengwei Yao, Daniel Povey, 
__Interspeech 2022__ [[pdf]](https://arxiv.org/pdf/2206.13236.pdf) [[code]](https://github.com/k2-fsa/k2/pull/891) \ No newline at end of file diff --git a/docs-zh-CN/resources.md b/docs-zh-CN/resources.md new file mode 100644 index 0000000..41a458b --- /dev/null +++ b/docs-zh-CN/resources.md @@ -0,0 +1,10 @@ +--- +template: resources.html +title: 新一代 Kaldi 资源汇总 +--- + +# 新一代 Kaldi 资源汇总 + +此页面包含了新一代 Kaldi 发布的几乎全部资源,包含模型,演示程序,工具链等等,支持常用正则和关键字的搜索,欢迎使用。如果使用中遇到问题,你可以给我们[留言](./message.md),或者加入我们的[微信](../assets/pic/wechat_group.jpg){:target="_blank"}和[QQ](../assets/pic/qq_group.jpeg){:target="_blank"}社群和众多开发者一起交流学习!也欢迎大家关注我们的[微信公众号](../assets/pic/wechat_account.jpg){:target="_blank"}。 + +> 如果下载链接(尤其是 github/huggingface 链接)被墙无法下载,您可使用我们的提供的[下载工具](https://r.kingway.fun/k2-sync/download){:target="_blank"}下载。 \ No newline at end of file diff --git a/docs/blog/index.md b/docs/blog/index.md new file mode 100644 index 0000000..e69de29 diff --git a/docs/demos/asr.md b/docs/demos/asr.md new file mode 100644 index 0000000..5a96d6a --- /dev/null +++ b/docs/demos/asr.md @@ -0,0 +1,55 @@ +--- +comments: true +title: Demos for ASR +--- + +The Next-gen Kaldi not only provides solutions for [training speech recognition models](https://github.com/k2-fsa/icefall){:target="_blank"} and [deployment](https://github.com/k2-fsa/sherpa-onnx){:target="_blank"}, but also releases a large number of pre-trained models and corresponding demo programs. + +## Huggingface space + +The most direct and convenient way to experience the Next-gen Kaldi is to visit our provided Huggingface space with a browser, which currently supports the experience of dozens of models in languages such as Chinese, English, Chinese-English, Chinese-English-Cantonese, Cantonese, Tibetan, Arabic, German, French, and Russian. + + +[Huggingface :hugging: Space](https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition){:target="_blank"}。 + +[![](../assets/images/asr_huggingface_en.png "Next-gen Kaldi speech recognition space")](https://huggingface.co/spaces/k2-fsa/automatic-speech-recognition){:target="_blank"} + + +### Webassembly + +The Next-gen Kaldi also offers [webassembly](https://webassembly.org/){:target="_blank"} support, which allows you run the models totally in the browser and no server is needed. Here are several pre-compiled models in the following table. 
If you want to package your own models using webassembly, plsease refer to [sherpa-onnx docs](https://k2-fsa.github.io/sherpa/onnx/wasm/index.html){:target="_blank"} and [sherpa-ncnn docs](https://k2-fsa.github.io/sherpa/ncnn/wasm/index.html){:target="_blank"} + +| Language | Encoder | Inference Engine | Address | Model link | +| ----------------------------- | ---------- | ------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| English | zipformer | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-en){:target="_blank"} | [Docs for the model](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-transducer/zipformer-transducer-models.html#csukuangfj-sherpa-onnx-streaming-zipformer-en-2023-06-21-english){:target="_blank"} | +| Chinese & English | zipformer | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-zh-en){:target="_blank"} | [Docs for the model](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-transducer/zipformer-transducer-models.html#csukuangfj-sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20-bilingual-chinese-english){:target="_blank"} | +| Chinese & English | paraformer | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-zh-en-paraformer){:target="_blank"} | [Docs for the model](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-paraformer/paraformer-models.html#csukuangfj-sherpa-onnx-streaming-paraformer-bilingual-zh-en-chinese-english){:target="_blank"} | +| Chinese & English & Cantonese | paraformer | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-onnx-zh-cantonese-en-paraformer){:target="_blank"} | [Docs for the model](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/online-paraformer/paraformer-models.html#csukuangfj-sherpa-onnx-streaming-paraformer-trilingual-zh-cantonese-en-chinese-cantonese-english){:target="_blank"} | +| English | zipformer | [ncnn](https://github.com/tencent/ncnn){:target="_blank"} | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-ncnn-en){:target="_blank"} | [Docs for the model](https://k2-fsa.github.io/sherpa/ncnn/pretrained_models/zipformer-transucer-models.html#csukuangfj-sherpa-ncnn-streaming-zipformer-en-2023-02-13-english){:target="_blank"} | +| Chinese & English | zipformer | [ncnn](https://github.com/tencent/ncnn){:target="_blank"} | [huggingface](https://huggingface.co/spaces/k2-fsa/web-assembly-asr-sherpa-ncnn-zh-en){:target="_blank"} | [Docs for the model](https://k2-fsa.github.io/sherpa/ncnn/pretrained_models/zipformer-transucer-models.html#csukuangfj-sherpa-ncnn-streaming-zipformer-bilingual-zh-en-2023-02-13-bilingual-chinese-english){:target="_blank"} | + + +### Subtitle extraction + +The subtitle extraction is a small tool we made based on speech recognition. 
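As a rough illustration of the plumbing behind such a tool (a sketch only, not the actual implementation; the `Segment` type and the sample segments are made up), an ASR model that emits segments with start and end timestamps only needs a small helper to serialize them into an `.srt` file:

```python
from dataclasses import dataclass

@dataclass
class Segment:
    start: float  # segment start time, in seconds
    end: float    # segment end time, in seconds
    text: str     # recognized text for this segment

def format_timestamp(seconds: float) -> str:
    """Format seconds as an SRT timestamp, e.g. 73.5 -> '00:01:13,500'."""
    millis = round(seconds * 1000)
    hours, millis = divmod(millis, 3_600_000)
    minutes, millis = divmod(millis, 60_000)
    secs, millis = divmod(millis, 1_000)
    return f"{hours:02d}:{minutes:02d}:{secs:02d},{millis:03d}"

def write_srt(segments: list[Segment], path: str) -> None:
    """Write recognized segments to an SRT subtitle file."""
    with open(path, "w", encoding="utf-8") as f:
        for i, seg in enumerate(segments, start=1):
            f.write(f"{i}\n")
            f.write(f"{format_timestamp(seg.start)} --> {format_timestamp(seg.end)}\n")
            f.write(f"{seg.text.strip()}\n\n")

# Two made-up segments, standing in for real ASR output with timestamps.
write_srt(
    [Segment(0.0, 2.5, "Hello, welcome to Next-gen Kaldi."),
     Segment(2.5, 5.0, "This line becomes the second subtitle.")],
    "demo.srt",
)
```

The hosted demo wraps a step of this kind behind a simple web UI.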
Users can upload a video to generate the corresponding subtitle file. Huggingface space (support Chinese, English, Chinese-English, and Russian) [address :hugging:](https://huggingface.co/spaces/k2-fsa/generate-subtitles-for-videos){:target="_blank"}. + + +## Videos + +> Note: All videos are in Chinese! + +In order to make it easier for you to see the demo quickly, we have also made a lot of videos, see [:fontawesome-brands-bilibili: Bilibili](https://space.bilibili.com/1234519871/video?tid=0&special_type=&pn=1&keyword=&order=click){:target="_blank"} for details. + + +## apk & exe + +We also offer a number of compiled android APK and Windows executables. + +| Language | Engine | Platform | Download link | +| ----------------------------------------- | ------------------------------------------------------------------------- | -------- | ------------------------------------------------------------------------------------------------------ | +| Chinese | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | Android | [Link](../resources.md?s=onnx.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-zh.apk){:target="_blank"} | +| English | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | Android | [Link](../resources.md?s=onnx.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-en.apk){:target="_blank"} | +| English & Chinese | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | Android | [Link](../resources.md?s=onnx.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-bilingual.*apk){:target="_blank"} | +| English & Chinese | [ncnn](https://github.com/tencent/ncnn){:target="_blank"} | Android | [Link](../resources.md?s=ncnn.*(x86\|x86_64\|arm64-v8a\|armeabi-v7a)-bilingual.*apk){:target="_blank"} | +| All language(download models by yourself) | [onnxruntime](https://github.com/microsoft/onnxruntime){:target="_blank"} | Windows | [Link](../resources.md?s=asr.*exe){:target="_blank"} | +| All language(download models by yourself) | [ncnn](https://github.com/tencent/ncnn){:target="_blank"} | Windows | [Link](../resources.md?s=ncnn.*exe){:target="_blank"} | \ No newline at end of file diff --git a/docs/demos/kws.md b/docs/demos/kws.md new file mode 100644 index 0000000..d67919c --- /dev/null +++ b/docs/demos/kws.md @@ -0,0 +1,16 @@ +--- +comments: true +title: Demos for KWS +--- + +# Keyword spotting + +We offer Android apks and video demos for now, listed below. 
+ + +| Language | Type | Address | Try it by yourself | +| -------- | ----------------- | ------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| English | apk | [arm64-v8a/armeabi-v7a/x86/x86_64](../resources.md?s=kws.*gigaspeech.*apk){:target="_blank"} | [model](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-gigaspeech-20240219.tar.gz) [code](https://github.com/k2-fsa/sherpa-onnx/tree/master/android/SherpaOnnxKws){:target="_blank"} | +| Chinese | apk | [arm64-v8a/armeabi-v7a/x86/x86_64](../resources.md?s=kws.*wenetspeech.*apk){:target="_blank"} | [model](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-wenetspeech-20240219.tar.gz) [code](https://github.com/k2-fsa/sherpa-onnx/tree/master/android/SherpaOnnxKws){:target="_blank"} | +| Chinese | video(in Chinese) | [:fontawesome-brands-bilibili: Bilibili](https://www.bilibili.com/video/BV1Nw411J7K6){:target="_blank"} | [model](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-wenetspeech-20240219.tar.gz) [code](https://github.com/k2-fsa/sherpa-onnx/tree/master/android/SherpaOnnxKws){:target="_blank"} | + diff --git a/docs/demos/tts.md b/docs/demos/tts.md new file mode 100644 index 0000000..0743371 --- /dev/null +++ b/docs/demos/tts.md @@ -0,0 +1,4 @@ +--- +comments: true +title: Demo for TTS +--- diff --git a/docs/events.md b/docs/events.md new file mode 100644 index 0000000..6d6a8a8 --- /dev/null +++ b/docs/events.md @@ -0,0 +1,25 @@ +--- +comments: true +title: Events for Next-gen Kaldi +--- + +## 2024 + +### March + +#### icefall + +- Add HLG decoding for streaming CTC (based on openfst). [code](https://github.com/k2-fsa/icefall/pull/1557){:target="_blank"} +- Add zipformer Cantonese recipe, using [MDCC](https://arxiv.org/pdf/2201.02419.pdf){:target="_blank"} dataset. [code](https://github.com/k2-fsa/icefall/pull/1537){:target="_blank"} [model](https://huggingface.co/zrjin/icefall-asr-mdcc-zipformer-2024-03-11/){:target="_blank"} +- Add finetuing recipe using [LoRA](https://arxiv.org/abs/2106.09685){:target="_blank"}. [code](https://github.com/k2-fsa/icefall/pull/1540){:target="_blank"} +- Add finetuing recipe using [adapter](https://arxiv.org/pdf/1902.00751.pdf){:target="_blank"}. [code](https://github.com/k2-fsa/icefall/pull/1512){:target="_blank"} [Doc](https://k2-fsa.github.io/icefall/recipes/Finetune/adapter/finetune_adapter.html){:target="_blank"} +- Add finetuing recipe using pre-trained zipformer model. [code](https://github.com/k2-fsa/icefall/pull/1484){:target="_blank"} [Doc](https://k2-fsa.github.io/icefall/recipes/Finetune/from_supervised/finetune_zipformer.html){:target="_blank"} +- Finetuing whisper using Wenetspeech & multi-zh-han dataset. [code](https://github.com/k2-fsa/icefall/pull/1483){:target="_blank"} + +#### sherpa + +- Add spoken language identification support (based on whisper) in sherpa-onnx. [huggingface space](https://huggingface.co/spaces/k2-fsa/spoken-language-identification){:target="_blank"} +- Add RISC-V support in sherpa-onnx。 +- Add python、c API and webassembly support for keyword spotting task in sherpa-onnx. 
+- Add Android Wear demo in sherpa-ncnn. [code](https://github.com/k2-fsa/sherpa-ncnn/pull/319){:target="_blank"} [video](https://www.bilibili.com/video/BV1qS421w7cK/){:target="_blank"} +- Run Whisper inference using TensorRT-LLM in sherpa. [code](https://github.com/k2-fsa/sherpa/pull/551){:target="_blank"} diff --git a/docs/get-started/icefall.md b/docs/get-started/icefall.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs/get-started/icefall.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs/get-started/index.md b/docs/get-started/index.md new file mode 100644 index 0000000..3c7597a --- /dev/null +++ b/docs/get-started/index.md @@ -0,0 +1,18 @@ +--- +comments: true +--- + +# Getting started + +## Features + + Next-gen Kaldi currently supports speech recognition (ASR), speech synthesis (TTS), keyword spotting (KWS), voice activity detection (VAD), speaker identification, spoken language identification, and more. Some tasks come with both training and deployment pipelines, while others are deployment-only, built on excellent third-party open-source libraries. The details are as follows: + +| Task | Training | Deployment | Docs | +|----------|---------|---------|-------------| +| Speech recognition (ASR) | :material-check: | :material-check: | [Training](./icefall.md) [Deployment](./sherpa/index.md) | +| Speech synthesis (TTS) | :material-check: | :material-check: | [Training](./sherpa/index.md) [Deployment](./sherpa/onnx.md) | +| Keyword spotting (KWS) | :material-check: | :material-check: | [Training](./icefall.md) [Deployment](./sherpa/onnx.md) | +| Voice activity detection (VAD) | :material-close: | :material-check: | [Deployment](./sherpa/onnx.md) | +| Speaker identification | :material-close: | :material-check: | [Deployment](./sherpa/onnx.md) | +| Spoken language identification | :material-close: | :material-check: | [Deployment](./sherpa/onnx.md) | diff --git a/docs/get-started/k2.md b/docs/get-started/k2.md new file mode 100644 index 0000000..aacbd45 --- /dev/null +++ b/docs/get-started/k2.md @@ -0,0 +1,6 @@ +--- +template: k2.html +comments: true +--- + +Only the latest few versions are listed above. If you want to compile from source, please refer to the project's detailed [installation document](https://k2-fsa.github.io/k2/installation/index.html){:target="_blank"}.
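For context, the version table rendered above that note on the k2 page is populated from `custom/assets/data/k2_whl_en.json`, which is produced by `scripts/generate_k2_wheel.py` later in this diff. Below is a minimal, illustrative sketch of reading that generated file and printing an install command; the file path, the Linux/CPU filter, and the assumption that the JSON has already been generated are ours, not part of the site itself.

```python
import json

# Wheel list written by scripts/generate_k2_wheel.py; each entry carries
# "build", "os", "pytorch", "platform" and a ready-made "install" snippet.
with open("custom/assets/data/k2_whl_en.json") as f:
    wheels = json.load(f)

# Entries are emitted newest-first, so the first match is the latest build.
# Filtering for a Linux CPU wheel here is only an example.
latest_cpu = next(
    (w for w in wheels if w["os"] == "Linux" and w["platform"] == "CPU"), None
)
if latest_cpu is not None:
    print(latest_cpu["install"])  # pip commands for the matching torch + k2
```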
\ No newline at end of file diff --git a/docs/get-started/lhotse.md b/docs/get-started/lhotse.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs/get-started/lhotse.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs/get-started/sherpa/index.md b/docs/get-started/sherpa/index.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs/get-started/sherpa/index.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs/get-started/sherpa/ncnn.md b/docs/get-started/sherpa/ncnn.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs/get-started/sherpa/ncnn.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs/get-started/sherpa/onnx.md b/docs/get-started/sherpa/onnx.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs/get-started/sherpa/onnx.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs/get-started/sherpa/torch.md b/docs/get-started/sherpa/torch.md new file mode 100644 index 0000000..1b25559 --- /dev/null +++ b/docs/get-started/sherpa/torch.md @@ -0,0 +1,3 @@ +--- +comments: true +--- diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..509b685 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,5 @@ +--- +template: home.html +title: Next-gen Kaldi +--- + diff --git a/docs/message.md b/docs/message.md new file mode 100644 index 0000000..5adb3a1 --- /dev/null +++ b/docs/message.md @@ -0,0 +1,14 @@ +--- +comments: true +title: Message Board +--- + + +Leave us a message about any problems you encounter while using Next-gen Kaldi; your opinions and suggestions are also welcome! diff --git a/docs/models/asr.md b/docs/models/asr.md new file mode 100644 index 0000000..f09ac6c --- /dev/null +++ b/docs/models/asr.md @@ -0,0 +1,20 @@ +--- +template: models.html +title: Pre-trained ASR models +comments: true +--- +
+ +# Notes + +We only list some commonly used models above; more pre-trained models can be found as follows: + +Project | Inference engine | Docs link | +----|------|-----| +Sherpa | Libtorch | [Pre-trained models](https://k2-fsa.github.io/sherpa/sherpa/pretrained_models/index.html){:target="_blank"} | +Sherpa-onnx | onnxruntime | [Pre-trained models](https://k2-fsa.github.io/sherpa/onnx/pretrained_models/index.html){:target="_blank"} | +Sherpa-ncnn | ncnn | [Pre-trained models](https://k2-fsa.github.io/sherpa/ncnn/pretrained_models/index.html){:target="_blank"} | +Icefall | Pytorch | No specific links; please refer to the RESULTS.md file of each recipe in [icefall](https://github.com/k2-fsa/icefall){:target="_blank"} | + +Please **read the docs carefully** and select the models that suit your needs; if anything is unclear, please leave us a comment. \ No newline at end of file diff --git a/docs/models/kws.md b/docs/models/kws.md new file mode 100644 index 0000000..932fa0b --- /dev/null +++ b/docs/models/kws.md @@ -0,0 +1,17 @@ +--- +title: Pre-trained Keyword spotting models +comments: true +--- + +# Keyword spotting models + +Currently, we offer two basic models in Chinese and English, both supporting the Pytorch and onnxruntime frameworks. The Pytorch model is mainly used for fine-tuning, while the onnx model is mainly used for deployment. You can first use the onnx models to test the performance on your target keywords; if the results do not meet expectations, consider fine-tuning the basic model we provide. + + +| Language | Framework | Download link | Usage | Description | +|---------|-------------|------------|--------------|--------------| +| Chinese | Pytorch | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-wenetspeech-20240219.tar.gz) | [Training and Fine-tuning](https://github.com/k2-fsa/icefall/pull/1428){:target="_blank"} | This model is trained on WenetSpeech L (10,000 hours) and has about 3.3M parameters. The modeling units are pinyin (initials and finals). It can be used as a basic model for fine-tuning. | +| Chinese | onnxruntime | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.1/sherpa-onnx-kws-zipformer-wenetspeech-3.3M-2024-01-01.tar.bz) | [Deployment docs](https://k2-fsa.github.io/sherpa/onnx/kws/pretrained_models/index.html#sherpa-onnx-kws-zipformer-wenetspeech-3-3m-2024-01-01-chinese){:target="_blank"} | This model is exported from the model above and can be used for deployment with [sherpa-onnx](https://github.com/k2-fsa/sherpa-onnx){:target="_blank"} | +| English | Pytorch | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.11/icefall-kws-zipformer-gigaspeech-20240219.tar.gz) | [Training and Fine-tuning](https://github.com/k2-fsa/icefall/pull/1428){:target="_blank"} | This model is trained on GigaSpeech XL (10,000 hours) and has about 3.3M parameters. The modeling units are BPE tokens. It can be used as a basic model for fine-tuning.
| +| English | onnxruntime | [github](https://github.com/pkufool/keyword-spotting-models/releases/download/v0.1/sherpa-onnx-kws-zipformer-gigaspeech-3.3M-2024-01-01.tar.bz) | [Deployment docs](https://k2-fsa.github.io/sherpa/onnx/kws/pretrained_models/index.html#sherpa-onnx-kws-zipformer-gigaspeech-3-3m-2024-01-01-english){:target="_blank"} | This model is exported from the model above and can be used for deployment with [sherpa-onnx](https://github.com/k2-fsa/sherpa-onnx){:target="_blank"} | + diff --git a/docs/models/tts.md b/docs/models/tts.md new file mode 100644 index 0000000..add095a --- /dev/null +++ b/docs/models/tts.md @@ -0,0 +1,4 @@ +--- +title: Pre-trained TTS models +comments: true +--- diff --git a/docs/publications.md b/docs/publications.md new file mode 100644 index 0000000..955fe51 --- /dev/null +++ b/docs/publications.md @@ -0,0 +1,24 @@ +--- +comments: true +title: Publications for Next-gen Kaldi +--- + +We only list the papers written while the Next-gen Kaldi team has been developing the Next-gen Kaldi toolkits; for more of Daniel Povey's papers, please see his [personal page](https://danielpovey.com/publications.html) or [Google Scholar](https://scholar.google.com/citations?hl=zh-CN&user=y_-5FWAAAAAJ&view_op=list_works&sortby=pubdate). + +- "Zipformer: A faster and better encoder for automatic speech recognition", Zengwei Yao, Liyong Guo, Xiaoyu Yang, Wei Kang, Fangjun Kuang, Yifan Yang, Zengrui Jin, Long Lin, Daniel Povey, __ICLR 2024__ [[pdf]](https://arxiv.org/pdf/2310.11230.pdf) [[code]](https://github.com/k2-fsa/icefall/blob/master/egs/librispeech/ASR/zipformer/zipformer.py) + +- "Libriheavy: a 50,000 hours asr corpus with punctuation casing and context", Wei Kang, Xiaoyu Yang, Zengwei Yao, Fangjun Kuang, Yifan Yang, Liyong Guo, Long Lin, Daniel Povey, __ICASSP 2024__ [[pdf]](https://arxiv.org/pdf/2309.08105.pdf) [[code]](https://github.com/k2-fsa/libriheavy) + +- "PromptASR for contextualized ASR with controllable style", Xiaoyu Yang, Wei Kang, Zengwei Yao, Yifan Yang, Liyong Guo, Fangjun Kuang, Long Lin, Daniel Povey, __ICASSP 2024__ [[pdf]](https://arxiv.org/pdf/2309.07414.pdf) [[code]](https://github.com/k2-fsa/icefall/pull/1250) + +- "Delay-penalized transducer for low-latency streaming asr", Wei Kang, Zengwei Yao, Fangjun Kuang, Liyong Guo, Xiaoyu Yang, Long Lin, Piotr Żelasko, Daniel Povey, __ICASSP 2023__ [[pdf]](https://arxiv.org/pdf/2211.00490.pdf) [[code icefall]](https://github.com/k2-fsa/icefall/pull/654) [[code k2]](https://github.com/k2-fsa/k2/pull/976) + +- "Fast and parallel decoding for transducer", Wei Kang, Liyong Guo, Fangjun Kuang, Long Lin, Mingshuang Luo, Zengwei Yao, Xiaoyu Yang, Piotr Żelasko, Daniel Povey, __ICASSP 2023__ [[pdf]](https://arxiv.org/pdf/2211.00484.pdf) [[code icefall]](https://github.com/k2-fsa/icefall/pull/250) [[code k2]](https://github.com/k2-fsa/k2/pull/926) + +- "Predicting multi-codebook vector quantization indexes for knowledge distillation", Liyong Guo, Xiaoyu Yang, Quandong Wang, Yuxiang Kong, Zengwei Yao, Fan Cui, Fangjun Kuang, Wei Kang, Long Lin, Mingshuang Luo, Piotr Żelasko, Daniel Povey, __ICASSP 2023__ [[pdf]](https://arxiv.org/pdf/2211.00508.pdf) [[code icefall]](https://github.com/k2-fsa/icefall/pull/387) [[code]](https://github.com/k2-fsa/multi_quantization) + +- "Blank-regularized ctc for frame skipping in neural transducer", Yifan Yang, Xiaoyu Yang, Liyong Guo, Zengwei Yao, Wei Kang, Fangjun Kuang, Long Lin, Xie Chen, Daniel Povey, __Interspeech 2023__ [[pdf]](https://arxiv.org/pdf/2305.11558.pdf)
[[code]](https://github.com/k2-fsa/icefall/pull/730) + +- "Delay-penalized CTC implemented based on Finite State Transducer", Zengwei Yao, Wei Kang, Fangjun Kuang, Liyong Guo, Xiaoyu Yang, Yifan Yang, Long Lin, Daniel Povey, __Interspeech 2023__ [[pdf]](https://arxiv.org/pdf/2305.11539.pdf) [[code]](https://github.com/k2-fsa/icefall/pull/669) + +- "Pruned RNN-T for fast, memory-efficient ASR training", Fangjun Kuang, Liyong Guo, Wei Kang, Long Lin, Mingshuang Luo, Zengwei Yao, Daniel Povey, __Interspeech 2022__ [[pdf]](https://arxiv.org/pdf/2206.13236.pdf) [[code]](https://github.com/k2-fsa/k2/pull/891) \ No newline at end of file diff --git a/docs/resources.md b/docs/resources.md new file mode 100644 index 0000000..252963b --- /dev/null +++ b/docs/resources.md @@ -0,0 +1,8 @@ +--- +template: resources.html +title: Next-gen Kaldi Resources +--- + +# Next-gen Kaldi Resources + +This page contains almost all the resources released by Next-gen Kaldi, including models, demo programs, toolchains, etc. It supports searches with common regular expressions and keywords. You can leave us a [MESSAGE](./message.md) or file an issue on [GITHUB](https://github.com/k2-fsa){:target="_blank"} if you encounter any problems. \ No newline at end of file diff --git a/index.html b/index.html deleted file mode 100644 index 553c1f2..0000000 --- a/index.html +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - k2-fsa - - - -
-
- - -
- -
- UNDER CONSTRUCTION! IGNORE THE REST OF THIS TEXT! -
-
- - -
- -
- -
-
- - - - - diff --git a/mkdocs-zh-CN.yml b/mkdocs-zh-CN.yml new file mode 100644 index 0000000..1c14ffc --- /dev/null +++ b/mkdocs-zh-CN.yml @@ -0,0 +1,168 @@ +# Project information +site_name: 新一代 Kaldi +site_url: https://k2-fsa.org/zh-CN +docs_dir: 'docs-zh-CN' +site_dir: 'site/zh-CN' +site_author: 新一代 Kaldi 团队 +site_description: >- + 新一代 Kaldi + +# Repository +repo_name: https://github.com/k2-fsa +repo_url: https://github.com/k2-fsa + +# Copyright +copyright: Copyright © 2019 - 2024 新一代 Kaldi 团队 + +theme: + name: material + custom_dir: custom + logo: assets/pic/k2.png + favicon: assets/pic/k2.png + language: zh + features: + - announce.dismiss + - content.code.annotate + - content.code.copy + - content.tooltips + - navigation.indexes + - navigation.sections + - navigation.expand + - navigation.footer + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - navigation.tracking + - search.highlight + - search.share + - search.suggest + - toc.follow + + palette: + primary: custom + scheme: k2 + + font: + text: Roboto + code: Roboto Mono + +plugins: + - blog + - search: + separator: '[\s\u200b\-]' + - minify: + minify_html: true + minify_js: true + minify_css: true + htmlmin_opts: + remove_comments: true + - mkdocs-jupyter: + include_source: True + include_requirejs: true + - git-committers: + repository: pkufool/k2-fsa-www + branch: main_page + - git-revision-date-localized: + enable_creation_date: true + type: date + +extra_javascript: + - assets/javascripts/mathjax.js + - https://polyfill.io/v3/polyfill.min.js?features=es6 + - https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js + +extra_css: + - assets/stylesheets/extra.css + +nav: + - 主页: index.md + - 从这开始: + - get-started/index.md + - k2: get-started/k2.md + - Icefall: get-started/icefall.md + - Lhotse: get-started/lhotse.md + - Sherpa: + - get-started/sherpa/index.md + - Sherpa-torch: get-started/sherpa/torch.md + - Sherpa-ncnn: get-started/sherpa/ncnn.md + - Sherpa-onnx: get-started/sherpa/onnx.md + - 模型: + - 语音识别: models/asr.md + - 语音合成: models/tts.md + - 自定义唤醒: models/kws.md + - 演示: + - 语音识别: demos/asr.md + - 语音合成: demos/tts.md + - 自定义唤醒: demos/kws.md + - 事件: events.md + - 论文: publications.md + - 资源: resources.md + - 博客: + - blog/index.md + - 留言板: message.md + +# Extensions +markdown_extensions: + - abbr + - admonition + - attr_list + - def_list + - footnotes + - md_in_html + - toc: + permalink: true + slugify: !!python/object/apply:pymdownx.slugs.slugify + kwds: + case: lower + - pymdownx.arithmatex: + generic: true + - pymdownx.betterem: + smart_enable: all + - pymdownx.caret + - pymdownx.details + - pymdownx.emoji: + emoji_generator: !!python/name:material.extensions.emoji.to_svg + emoji_index: !!python/name:material.extensions.emoji.twemoji + - pymdownx.highlight: + anchor_linenums: true + line_spans: __span + pygments_lang_class: true + - pymdownx.inlinehilite + - pymdownx.keys + - pymdownx.mark + - pymdownx.smartsymbols + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.tilde + + +# Customization +extra: + alternate: + - name: English + link: / + lang: en + - name: 中文 + link: /zh-CN/ + lang: zh + + social: + - icon: fontawesome/brands/github + link: https://github.com/k2-fsa + - icon: fontawesome/brands/weixin + link: https://k2-fsa.org/assets/pic/wechat_account.jpg + - icon: fontawesome/brands/weixin + link: 
https://k2-fsa.org/assets/pic/wechat_group.jpg + - icon: fontawesome/brands/qq + link: https://k2-fsa.org/assets/pic/qq_group.jpeg + - icon: fontawesome/brands/zhihu + link: https://www.zhihu.com/people/yaozengwei + - icon: fontawesome/brands/bilibili + link: https://space.bilibili.com/1234519871 diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000..e47194f --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,164 @@ +# Project information +site_name: Next-gen Kaldi +site_url: https://k2-fsa.org +docs_dir: 'docs' +site_dir: 'site' +site_author: Next-gen Kaldi team +site_description: >- + Next-gen Kaldi + +# Repository +repo_name: https://github.com/k2-fsa +repo_url: https://github.com/k2-fsa + +# Copyright +copyright: Copyright © 2019 - 2024 Next-gen Kaldi team + +theme: + name: material + logo: assets/pic/k2.png + favicon: assets/pic/k2.png + language: en + custom_dir: custom + features: + - announce.dismiss + - content.code.annotate + - content.code.copy + - content.tooltips + - navigation.indexes + - navigation.sections + - navigation.expand + - navigation.footer + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - navigation.tracking + - search.highlight + - search.share + - search.suggest + - toc.follow + + palette: + primary: custom + scheme: k2 + + font: + text: Roboto + code: Roboto Mono + +plugins: + - search + - blog + - minify: + minify_html: true + minify_js: true + minify_css: true + htmlmin_opts: + remove_comments: true + - mkdocs-jupyter: + include_source: True + include_requirejs: true + - git-committers: + repository: pkufool/k2-fsa-www + branch: main_page + - git-revision-date-localized: + enable_creation_date: true + type: date + +extra_javascript: + - assets/javascripts/mathjax.js + - https://polyfill.io/v3/polyfill.min.js?features=es6 + - https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js + +extra_css: + - assets/stylesheets/extra.css + +nav: + - Home: index.md + - Get started: + - get-started/index.md + - k2: get-started/k2.md + - Icefall: get-started/icefall.md + - Lhotse: get-started/lhotse.md + - Sherpa: + - get-started/sherpa/index.md + - Sherpa-torch: get-started/sherpa/torch.md + - Sherpa-ncnn: get-started/sherpa/ncnn.md + - Sherpa-onnx: get-started/sherpa/onnx.md + - Models: + - ASR: models/asr.md + - TTS: models/tts.md + - KWS: models/kws.md + - Demos: + - ASR: demos/asr.md + - TTS: demos/tts.md + - KWS: demos/kws.md + - Events: events.md + - Publications: publications.md + - Resources: resources.md + - Blog: + - blog/index.md + - Message board: message.md + +# Extensions +markdown_extensions: + - abbr + - admonition + - attr_list + - def_list + - footnotes + - md_in_html + - toc: + permalink: true + - pymdownx.arithmatex: + generic: true + - pymdownx.betterem: + smart_enable: all + - pymdownx.caret + - pymdownx.details + - pymdownx.emoji: + emoji_generator: !!python/name:material.extensions.emoji.to_svg + emoji_index: !!python/name:material.extensions.emoji.twemoji + - pymdownx.highlight: + anchor_linenums: true + line_spans: __span + pygments_lang_class: true + - pymdownx.inlinehilite + - pymdownx.keys + - pymdownx.magiclink: + repo_url_shorthand: true + user: squidfunk + repo: mkdocs-material + - pymdownx.mark + - pymdownx.smartsymbols + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.tilde + + +# Customization +extra: + alternate: 
+ - name: English + link: / + lang: en + - name: 中文 + link: /zh-CN/ + lang: zh + + social: + - icon: fontawesome/brands/github + link: https://github.com/k2-fsa + - icon: fontawesome/brands/weixin + link: https://k2-fsa.org/assets/pic/wechat_account.jpg + - icon: fontawesome/brands/weixin + link: https://k2-fsa.org/assets/pic/wechat_group.jpg + - icon: fontawesome/brands/qq + link: https://k2-fsa.org/assets/pic/qq_group.jpeg diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e11c328 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +mkdocs-material +jieba +mkdocs-minify-plugin +mkdocs-jupyter +mkdocs-git-revision-date-localized-plugin +mkdocs-git-committers-plugin-2 +requests \ No newline at end of file diff --git a/scripts/generate_k2_wheel.py b/scripts/generate_k2_wheel.py new file mode 100644 index 0000000..8d83eb5 --- /dev/null +++ b/scripts/generate_k2_wheel.py @@ -0,0 +1,93 @@ +import json +import requests +import re + +K2_CUDA_PAGE="https://k2-fsa.github.io/k2/cuda.html" +K2_CPU_PAGE="https://k2-fsa.github.io/k2/cpu.html" + +k2_wheels = {} + +# for linux cuda +r = requests.get(K2_CUDA_PAGE) +url_pattern = r"href=\"(.*)\">" +pattern = r"k2-(\d\.\d+\.\d+\.dev\d{8})\+cuda(\d+\.\d)\.torch(\d\.\d+\.\d(\.dev\d{8})?)-(\S+)-(\S+)-(\S+)\.whl" +for line in r.text.split("
"): + m = re.search(url_pattern, line) + if m is None: + continue + url = m.group(1) + file_name = url.split("/")[-1] + m = re.search(pattern, file_name) + assert m is not None + k2_version, cuda_version, torch_version, os = m.group(1), m.group(2), m.group(3), m.group(7) + if "manylinux" not in os: + continue + install = f"""# Install Pytorch + pip install torch=={torch_version} --index-url https://download.pytorch.org/whl/cu{cuda_version.replace(".","")} + + # Install k2 + pip install k2=={k2_version}+cuda{cuda_version}.torch{torch_version} -f https://k2-fsa.github.io/k2/cuda.html + """ + k2_wheels[(k2_version, "Linux", torch_version, cuda_version)] = install + +# for cpu (windows, macos, linux) +r = requests.get(K2_CPU_PAGE) +url_pattern = r"href=\"(.*)\">" +pattern = r"k2-(\d\.\d+\.\d+\.dev\d{8})\+cpu\.torch(\d\.\d+\.\d(\.dev\d{8})?)-(\S+)-(\S+)-(\S+)\.whl" +for line in r.text.split("
"): + m = re.search(url_pattern, line) + if m is None: + continue + url = m.group(1) + file_name = url.split("/")[-1] + m = re.search(pattern, file_name) + assert m is not None + k2_version, torch_version, _, _, _, os = m.group(1), m.group(2), m.group(3),m.group(4),m.group(5), m.group(6) + if "macosx" in os: + os = "Macos" + elif "win" in os: + os = "Windows" + elif "manylinux" in os: + os = "Linux" + else: + continue + + install = f"""# Install Pytorch + pip install torch=={torch_version} --index-url https://download.pytorch.org/whl/cpu + + # Install k2 + pip install k2=={k2_version}+cpu.torch{torch_version} -f https://k2-fsa.github.io/k2/cpu.html + """ + k2_wheels[(k2_version, os, torch_version, "CPU")] = install + +En_wheels = [] +Cn_wheels = [] # Cn might have different index page +versions = {} # +for k in sorted(k2_wheels.keys(), reverse=True): + v = k2_wheels[k] + k2, os, torch, platform = k + # only display version as major.minor.patch + v_k2 = k2.split(".dev")[0] + v_torch = torch.split(".dev")[0] + v_k = (v_k2, os, v_torch, platform) + if v_k not in versions: + En_wheels.append({ + "build" : v_k2, + "os" : os, + "pytorch" : v_torch, + "platform" : platform if platform == "CPU" else f"CUDA{platform.replace('.', '')}", + "install" : v + }) + Cn_wheels.append({ + "build" : v_k2, + "os" : os, + "pytorch" : v_torch, + "platform" : platform if platform == "CPU" else f"CUDA{platform.replace('.', '')}", + "install" : v.replace("cpu.html", "cpu-cn.html") if platform == "CPU" else v.replace("cuda.html", "cuda-cn.html") + }) + versions[v_k] = 1 + +with open("custom/assets/data/k2_whl_en.json", "w") as f: + json.dump(En_wheels, f, indent=2) +with open("custom/assets/data/k2_whl_cn.json", "w") as f: + json.dump(Cn_wheels, f, indent=2) \ No newline at end of file diff --git a/scripts/generate_resources.py b/scripts/generate_resources.py new file mode 100644 index 0000000..5737662 --- /dev/null +++ b/scripts/generate_resources.py @@ -0,0 +1,130 @@ +import requests +import re +from typing import Dict, List, Set + +def generate_html(resources: Dict[str, str]): + html = """ + {% extends "main.html" %} + + {% block styles %} + {{ super() }} + + {% endblock %} + + {% block content %} + {{ super() }} +
+ + + + """ + + for k, v in resources.items(): + html += f'\n' + + html += """ + +
{k}
+
+ + {% endblock %} + + {% block scripts %} + {{ super() }} + + + + {% endblock %} + """ + return html + +def get_apks(link: str, resources: Dict[str, str]): + r = requests.get(link) + url_pattern = r"href=\"(.*)\".*>" + latest_version = None + for line in r.text.split("
"): + m = re.search(url_pattern, line) + if m is None: + continue + url = m.group(1).strip() + name = url.split("/")[-1] + if name.endswith("apk"): + version = "-".join(name.split("-")[0:3]) + if latest_version is None: + latest_version = version + else: + if version != latest_version: + continue + resources[name] = url + +def get_releases(link: str, resources: Dict[str, str], tags: Set[str]): + r = requests.get(link) + releases = r.json() + for i, release in enumerate(releases): + if i == 0 or release["tag_name"] in tags: + for asset in release["assets"]: + url = asset["browser_download_url"] + name = url.split("/")[-1] + resources[name] = url + + +SHERPA_ONNX_RELEASE="https://api.github.com/repos/k2-fsa/sherpa-onnx/releases" +SHERPA_NCNN_RELEASE="https://api.github.com/repos/k2-fsa/sherpa-ncnn/releases" +SP_ID_APK="https://k2-fsa.github.io/sherpa/onnx/speaker-identification/apk.html" +TTS_APK="https://k2-fsa.github.io/sherpa/onnx/tts/apk.html" +TTS_ENGINE_APK="https://k2-fsa.github.io/sherpa/onnx/tts/apk-engine.html" +resources = {} + +get_apks(SP_ID_APK, resources) +get_apks(TTS_APK, resources) +get_apks(TTS_ENGINE_APK, resources) +get_releases(SHERPA_ONNX_RELEASE, resources, set(["kws-models", "speaker-recongition-models", "tts-models", "asr-models"])) +get_releases(SHERPA_NCNN_RELEASE, resources, set(["models"])) + +with open("custom/resources.html", "w") as f: + f.write(generate_html(resources)) diff --git a/style.css b/style.css deleted file mode 100644 index 01cdb66..0000000 --- a/style.css +++ /dev/null @@ -1,54 +0,0 @@ -a,h1,h2,h3,h4,body,html,div,textarea,input{ -font-family: Calibri,Arial,Sans Serif; -line-height: 1.3em; - -} -.currentPage{ -color:red; -} -#centeredContainer{ -width:1060px; -margin:0 auto; -font-size:1.1em; -padding-top:1em; -} -#leftCol{ -float:left; -width:710px; -padding-top:1em; -} -#rightCol{ -float:right; -width:230px; -padding-top:1em; -} -a:hover{ -color:blue; -} -a{ -text-decoration:none; -} -#top{ -word-spacing:12px; -padding-left:15px; -white-space:nowrap; -padding-top:6px; -background-color: #d3d3d3; -height:2em; -} -.contact_info{ -font-family: Calibri,Arial,Sans Serif; -line-height: 1.3em; -} -.contactTitle{ -font-size: 16px; -font-weight:bold; -border-bottom:2px solid #1a1a1a; -margin:20px 0 10px 0; -} -#footer{ -float:right; -} - - -