diff --git a/.github/workflows/commit-check.yml b/.github/workflows/commit-check.yml
index 5f71d78b97307..36d6425e58096 100644
--- a/.github/workflows/commit-check.yml
+++ b/.github/workflows/commit-check.yml
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-name: Code Style, Abandoned Tests
+name: Code Style, Abandoned Tests, Javadocs
on:
pull_request:
push:
@@ -34,12 +34,12 @@ jobs:
java: [ '8', '11' ]
name: Check java code on JDK ${{ matrix.java }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Setup java
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: ${{ matrix.java }}
@@ -67,16 +67,20 @@ jobs:
run : |
./mvnw test -Pcheck-test-suites,all-java,all-scala,scala -B -V
+ - name: Check javadocs.
+ run : |
+ ./mvnw -DskipTests install -pl modules/tools -B -V && ./mvnw initialize -Pjavadoc -B -V
+
check-dotnet:
name: Сheck .NET code
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Setup .NET SDK
- uses: actions/setup-dotnet@v3
+ uses: actions/setup-dotnet@v4
with:
dotnet-version: '6.0.x'
@@ -96,12 +100,12 @@ jobs:
- { python: "3.8", toxenv: "py38" }
- { python: "3.8", toxenv: "codestyle" }
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.cfg.python}}
diff --git a/.github/workflows/sonar-branch.yml b/.github/workflows/sonar-branch.yml
index f7a4c98914f68..90463bb1369c3 100644
--- a/.github/workflows/sonar-branch.yml
+++ b/.github/workflows/sonar-branch.yml
@@ -23,34 +23,40 @@ on:
- master
- 'ignite-[0-9].[0-9]+.[0-9]+*'
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
jobs:
sonarcloud:
name: Sonar Analysis
if: github.repository == 'apache/ignite'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
+ persist-credentials: false
+ # "fetch-depth: 0" is needed for Sonar's new code detection, blame information and issue backdating
+ # see more details at https://community.sonarsource.com/t/git-fetch-depth-implications/75260
- - name: Setup java
- uses: actions/setup-java@v3
+ - name: Setup JDK11
+ uses: actions/setup-java@v4
with:
distribution: 'temurin'
java-version: 11
- cache: 'maven'
- name: Cache SonarCloud packages
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: ~/.sonar/cache
key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar
- name: Cache Maven packages
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
- path: ~/.m2
+ path: ~/.m2/repository
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-m2
@@ -59,7 +65,13 @@ jobs:
- name: Build with Maven
run: |
- ./mvnw install -P all-java,lgpl,examples,skip-docs -DskipTests -B -V
+ ./mvnw install -P all-java,lgpl,examples,skip-docs -DskipTests -B -V
+
+ - name: Setup JDK17
+ uses: actions/setup-java@v4
+ with:
+ distribution: 'temurin'
+ java-version: 17
- name: Sonar Analyze Upload
run: >
@@ -67,7 +79,7 @@ jobs:
-P all-java,lgpl,examples,skip-docs
-Dsonar.branch.name=${{ github.ref_name }}
-Dsonar.projectKey=apache_ignite
- -Dsonar.login=${SONARCLOUD_TOKEN}
+ -Dsonar.token=${SONARCLOUD_TOKEN}
-B -V
env:
MAVEN_OPTS: "-XX:+UseG1GC -XX:InitialHeapSize=2g -XX:MaxHeapSize=6g -XX:+UseStringDeduplication"
diff --git a/.github/workflows/sonar-pr-from-fork-build.yml b/.github/workflows/sonar-pr-from-fork-build.yml
new file mode 100644
index 0000000000000..a1c8fc4bc0b4f
--- /dev/null
+++ b/.github/workflows/sonar-pr-from-fork-build.yml
@@ -0,0 +1,91 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: SonarBuild
+
+on: pull_request
+
+permissions:
+ contents: read
+
+concurrency:
+ group: sonar-pr-workflow-${{ github.event.pull_request.head.repo.full_name }}-${{ github.event.pull_request.head.ref }}
+ cancel-in-progress: true
+
+jobs:
+ build:
+ if: github.repository == 'apache/ignite'
+ name: Build artifacts for Sonar Analysis
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: "refs/pull/${{ github.event.number }}/merge"
+ persist-credentials: false
+
+ - name: Set up JDK11
+ uses: actions/setup-java@v4
+ with:
+ distribution: 'temurin'
+ java-version: '11'
+
+ - name: Cache local Maven repository
+ uses: actions/cache@v4
+ with:
+ path: ~/.m2/repository
+ key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+ restore-keys: |
+ ${{ runner.os }}-m2
+
+ - name: Install Libnuma
+ run: sudo apt-get update && sudo apt-get install libnuma-dev
+
+ - name: Build with Maven
+ run: |
+ ./mvnw install -P all-java,lgpl,examples,skip-docs -DskipTests -B -V
+
+ - name: Prepare compiled classes artifact
+ shell: bash
+ run: find -iname "*target" -type d -exec tar -rf target.tar {} \+
+
+ - name: Upload compiled classes artifact
+ uses: actions/upload-artifact@v4
+ id: target-artifact-upload-step
+ with:
+ name: target-artifact
+ path: |
+ target.tar
+ if-no-files-found: error
+ retention-days: 1
+
+      - name: Prepare pull request artifact
+        shell: bash
+        env:
+          # Branch names come from the fork and may contain shell metacharacters; pass via env, never inline ${{ }} into the script.
+          PR_HEAD_REF: ${{ github.event.pull_request.head.ref }}
+          PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
+        run: |
+          printf '%s\n' '${{ github.event.pull_request.number }}' "$PR_HEAD_REF" "$PR_BASE_REF" '${{ github.event.pull_request.head.sha }}' '${{ steps.target-artifact-upload-step.outputs.artifact-id }}' > pr-event.txt
+
+ - name: Upload pull request event artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: pr-event-artifact
+ path: |
+ pr-event.txt
+ if-no-files-found: error
+ retention-days: 1
diff --git a/.github/workflows/sonar-pr-from-fork-scan.yml b/.github/workflows/sonar-pr-from-fork-scan.yml
new file mode 100644
index 0000000000000..f32fd998037cc
--- /dev/null
+++ b/.github/workflows/sonar-pr-from-fork-scan.yml
@@ -0,0 +1,175 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: Sonar Quality Pull Request Analysis
+
+on:
+ workflow_run:
+ workflows: [SonarBuild]
+ types: [completed]
+
+concurrency:
+ group: sonar-pr-workflow-${{ github.event.workflow_run.head_repository.full_name }}-${{ github.event.workflow_run.head_branch }}
+ cancel-in-progress: true
+
+jobs:
+ sonarcloud:
+ if: ${{ github.event.workflow_run.conclusion == 'success' && github.repository == 'apache/ignite' }}
+ name: Sonar Analysis
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ actions: write
+ checks: write
+ steps:
+ - name: Download pull request event artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: pr-event-artifact
+ run-id: ${{ github.event.workflow_run.id }}
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Read pull request event
+ shell: bash
+ run: |
+ echo "pr_number=$(sed '1q;d' pr-event.txt)" >> "$GITHUB_ENV"
+ echo "pr_head_ref=$(sed '2q;d' pr-event.txt)" >> "$GITHUB_ENV"
+ echo "pr_base_ref=$(sed '3q;d' pr-event.txt)" >> "$GITHUB_ENV"
+ echo "pr_head_sha=$(sed '4q;d' pr-event.txt)" >> "$GITHUB_ENV"
+ echo "target_artifact_id=$(sed '5q;d' pr-event.txt)" >> "$GITHUB_ENV"
+
+ - name: Create new PR check
+ uses: actions/github-script@v7
+ id: check
+ with:
+ script: |
+ const jobs_response = await github.rest.actions.listJobsForWorkflowRunAttempt({
+ ...context.repo,
+ run_id: context.runId,
+ attempt_number: process.env.GITHUB_RUN_ATTEMPT,
+ });
+
+ const job_url = jobs_response.data.jobs[0].html_url;
+
+ const check_response = await github.rest.checks.create({
+ ...context.repo,
+ name: 'Sonar Quality Pull Request Analysis',
+ head_sha: process.env.pr_head_sha,
+ status: 'in_progress',
+ output: {
+ title: 'Sonar Quality Pull Request Analysis',
+ summary: '[Details ...](' + job_url + ')'
+ }
+ });
+
+ return check_response.data.id;
+ result-encoding: string
+
+      - name: Checkout PR head branch
+        uses: actions/checkout@v4
+        with:
+          repository: ${{ github.event.workflow_run.head_repository.full_name }}
+          ref: ${{ github.event.workflow_run.head_branch }}
+          fetch-depth: 0  # needed for Sonar's new code detection, blame information and issue backdating
+          persist-credentials: false  # untrusted fork code is executed later in this job; do not leave the token in .git/config
+        # see more details at https://community.sonarsource.com/t/git-fetch-depth-implications/75260
+
+      - name: Checkout PR base branch
+        env:
+          # head_branch comes from the fork and may contain shell metacharacters; pass via env, never inline ${{ }} into the script.
+          PR_HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
+        run: |
+          git remote add upstream ${{ github.event.repository.clone_url }} && git fetch upstream
+          git checkout -B "$pr_base_ref" "upstream/$pr_base_ref" && git checkout "$PR_HEAD_BRANCH" && git clean -ffdx && git reset --hard HEAD
+
+ - name: Download compiled classes artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: target-artifact
+ run-id: ${{ github.event.workflow_run.id }}
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Delete compiled classes artifact
+ if: always()
+ uses: actions/github-script@v7
+ with:
+ script: |
+ await github.rest.actions.deleteArtifact({
+ ...context.repo,
+ artifact_id: process.env.target_artifact_id
+ });
+
+ - name: Extract compiled classes artifact
+ shell: bash
+ run: tar -xf target.tar
+
+ - name: Set up JDK17
+ uses: actions/setup-java@v4
+ with:
+ java-version: '17'
+ distribution: 'temurin'
+
+ - name: Cache SonarCloud packages
+ uses: actions/cache@v4
+ with:
+ path: ~/.sonar/cache
+ key: ${{ runner.os }}-sonar
+
+ - name: Cache local Maven repository
+ uses: actions/cache@v4
+ with:
+ path: ~/.m2/repository
+ key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+ restore-keys: |
+ ${{ runner.os }}-m2
+
+ - name: Sonar Analyze Upload
+ shell: bash
+ run: >
+ ./mvnw org.sonarsource.scanner.maven:sonar-maven-plugin:sonar
+ -P all-java,lgpl,examples,skip-docs
+ -Dsonar.scm.revision=${{ github.event.workflow_run.head_sha }}
+ -Dsonar.pullrequest.branch=${{ env.pr_head_ref }}
+ -Dsonar.pullrequest.base=${{ env.pr_base_ref }}
+ -Dsonar.pullrequest.key=${{ env.pr_number }}
+ -Dsonar.pullrequest.github.repository=apache/ignite
+ -Dsonar.pullrequest.provider=GitHub
+ -Dsonar.pullrequest.github.summary_comment=true
+ -Dsonar.projectKey=apache_ignite
+ -Dsonar.token=${{ secrets.SONARCLOUD_TOKEN }}
+ -B -V
+ env:
+ MAVEN_OPTS: "-XX:+UseG1GC -XX:InitialHeapSize=2g -XX:MaxHeapSize=6g -XX:+UseStringDeduplication"
+ SONAR_OPTS: "-XX:+UseG1GC -XX:InitialHeapSize=2g -XX:MaxHeapSize=6g -XX:+UseStringDeduplication"
+ JAVA_OPTS: "-XX:+UseG1GC -XX:InitialHeapSize=2g -XX:MaxHeapSize=6g -XX:+UseStringDeduplication"
+
+ - name: Update status of PR check
+ uses: actions/github-script@v7
+ if: always()
+ env:
+ CHECK_ID: ${{ steps.check.outputs.result }}
+ JOB_STATUS: ${{ job.status }}
+ with:
+ script: |
+ const { CHECK_ID, JOB_STATUS } = process.env;
+
+ await github.rest.checks.update({
+ ...context.repo,
+ check_run_id: CHECK_ID,
+ status: 'completed',
+ conclusion: JOB_STATUS
+ });
diff --git a/.github/workflows/sonar-pr-from-fork.yml b/.github/workflows/sonar-pr-from-fork.yml
deleted file mode 100644
index f4309f68ce6a4..0000000000000
--- a/.github/workflows/sonar-pr-from-fork.yml
+++ /dev/null
@@ -1,91 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-name: Sonar Quality Pull Request Analysis
-
-# TODO IGNITE-20466 Investigate and fix the issue with running this workflow on PRs from forks.
-on: pull_request
-
-permissions:
- contents: read
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- sonarcloud:
- if: github.repository == 'apache/ignite'
- name: Sonar Analysis
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- with:
- ref: "refs/pull/${{ github.event.number }}/merge"
- fetch-depth: 0
-
- - name: Set up JDK11
- uses: actions/setup-java@v3
- with:
- distribution: 'temurin'
- java-version: '11'
- cache: 'maven'
-
- - name: Cache SonarCloud packages
- uses: actions/cache@v3
- with:
- path: ~/.sonar/cache
- key: ${{ runner.os }}-sonar
- restore-keys: ${{ runner.os }}-sonar
-
- - name: Cache local Maven repository
- uses: actions/cache@v3
- with:
- path: ~/.m2/repository
- key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
- restore-keys: |
- ${{ runner.os }}-m2
-
- - name: Install Libnuma
- run: sudo apt-get update && sudo apt-get install libnuma-dev
-
- - name: Build with Maven
- run: |
- ./mvnw clean install -P all-java,lgpl,examples,skip-docs -DskipTests -B -V
-
- - name: Sonar Analyze Upload
- run: >
- ./mvnw org.sonarsource.scanner.maven:sonar-maven-plugin:sonar
- -P all-java,lgpl,examples,skip-docs
- -Dsonar.pullrequest.branch=${PULLREQUEST_BRANCH}
- -Dsonar.pullrequest.base=${PULLREQUEST_BASE}
- -Dsonar.pullrequest.key=${PULLREQUEST_KEY}
- -Dsonar.pullrequest.github.repository=apache/ignite
- -Dsonar.pullrequest.provider=GitHub
- -Dsonar.pullrequest.github.summary_comment=true
- -Dsonar.projectKey=apache_ignite
- -Dsonar.login=${SONARCLOUD_TOKEN}
- -B -V
- env:
- MAVEN_OPTS: "-XX:+UseG1GC -XX:InitialHeapSize=2g -XX:MaxHeapSize=6g -XX:+UseStringDeduplication"
- SONAR_OPTS: "-XX:+UseG1GC -XX:InitialHeapSize=2g -XX:MaxHeapSize=6g -XX:+UseStringDeduplication"
- JAVA_OPTS: "-XX:+UseG1GC -XX:InitialHeapSize=2g -XX:MaxHeapSize=6g -XX:+UseStringDeduplication"
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- SONARCLOUD_TOKEN: ${{ secrets.SONARCLOUD_TOKEN }}
- PULLREQUEST_BRANCH: ${{ github.head_ref }}
- PULLREQUEST_BASE: ${{ github.base_ref }}
- PULLREQUEST_KEY: ${{ github.event.pull_request.number }}
diff --git a/docs/_docs/extensions-and-integrations/change-data-capture-extensions.adoc b/docs/_docs/extensions-and-integrations/change-data-capture-extensions.adoc
index 5973b34607eed..98d6cfc7f32fd 100644
--- a/docs/_docs/extensions-and-integrations/change-data-capture-extensions.adoc
+++ b/docs/_docs/extensions-and-integrations/change-data-capture-extensions.adoc
@@ -51,12 +51,13 @@ image:../../assets/images/integrations/CDC-ignite2igniteClient.svg[]
=== Metrics
+[cols="25%,75%",opts="header"]
|===
|Name |Description
| `EventsCount` | Count of messages applied to destination cluster.
-| `LastEventTime` | Timestamp of last applied event.
-| `TypesCount` | Count of received binary types events.
-| `MappingsCount` | Count of received mappings events.
+| `LastEventTime` | Timestamp of last applied event to destination cluster.
+| `TypesCount` | Count of binary types events applied to destination cluster.
+| `MappingsCount` | Count of mappings events applied to destination cluster.
|===
== Ignite to Ignite CDC streamer
@@ -80,12 +81,13 @@ image:../../assets/images/integrations/CDC-ignite2ignite.svg[]
=== Metrics
+[cols="25%,75%",opts="header"]
|===
|Name |Description
| `EventsCount` | Count of messages applied to destination cluster.
-| `LastEventTime` | Timestamp of last applied event.
-| `TypesCount` | Count of received binary types events.
-| `MappingsCount` | Count of received mappings events.
+| `LastEventTime` | Timestamp of last applied event to destination cluster.
+| `TypesCount` | Count of binary types events applied to destination cluster.
+| `MappingsCount` | Count of mappings events applied to destination cluster.
|===
== CDC replication using Kafka
@@ -118,11 +120,15 @@ image:../../assets/images/integrations/CDC-ignite2kafka.svg[]
=== IgniteToKafkaCdcStreamer Metrics
+[cols="30%,70%",opts="header"]
|===
|Name |Description
-| `EventsCount` | Count of messages applied to destination cluster.
-| `LastEventTime` | Timestamp of last applied event.
-| `BytesSent` | Number of bytes send to Kafka.
+| `EventsCount` | Count of messages applied to Kafka.
+| `LastEventTime` | Timestamp of last applied event to Kafka.
+| `TypesCount` | Count of binary types events applied to Kafka.
+| `MappingsCount` | Count of mappings events applied to Kafka.
+| `BytesSent` | Count of bytes sent to Kafka.
+| `MarkersCount` | Count of metadata markers sent to Kafka.
|===
=== `kafka-to-ignite.sh` application
@@ -164,7 +170,7 @@ Kafka to Ignite configuration file should contain the following beans that will
. `java.util.Properties` bean with the name `kafkaProperties`: Single Kafka consumer configuration.
. `org.apache.ignite.cdc.kafka.KafkaToIgniteCdcStreamerConfiguration` bean: Options specific to `kafka-to-ignite.sh` application.
-[cols="20%,45%,35%",opts="header"]
+[cols="25%,45%,30%",opts="header"]
|===
|Name |Description | Default value
| `caches` | Set of cache names to replicate. | null
@@ -176,6 +182,19 @@ Kafka to Ignite configuration file should contain the following beans that will
| `kafkaRequestTimeout` | Kafka request timeout in milliseconds. | `3000`
| `maxBatchSize` | Maximum number of events to be sent to destination cluster in a single batch. | 1024
| `threadCount` | Count of threads to proceed consumers. Each thread poll records from dedicated partitions in round-robin manner. | 16
+| `metricRegistryName` | Name for metric registry. `org.apache.metricRegistryName.cdc.applier` | cdc-kafka-to-ignite
+|===
+
+=== Metrics
+
+[cols="35%,65%",opts="header"]
+|===
+|Name |Description
+| `EventsReceivedCount` | Count of events received from Kafka.
+| `LastEventReceivedTime` | Timestamp of last received event from Kafka.
+| `EventsSentCount` | Count of events sent to destination cluster.
+| `LastBatchSentTime` | Timestamp of last sent batch to the destination cluster.
+| `MarkersCount` | Count of metadata markers received from Kafka.
|===
==== Logging
diff --git a/modules/calcite/pom.xml b/modules/calcite/pom.xml
index 9aef74c606a3d..57ee3a8013e1b 100644
--- a/modules/calcite/pom.xml
+++ b/modules/calcite/pom.xml
@@ -42,7 +42,7 @@
2.8.2
3.1.8
2.4
- 2.7.0
+ 2.9.0
0.10.2
3.6.1
diff --git a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/RootQuery.java b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/RootQuery.java
index 31fafc96eee6c..e67e7f98435b6 100644
--- a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/RootQuery.java
+++ b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/RootQuery.java
@@ -29,6 +29,7 @@
import java.util.stream.Collectors;
import org.apache.calcite.plan.Context;
import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.util.CancelFlag;
import org.apache.ignite.IgniteCheckedException;
@@ -139,10 +140,15 @@ public RootQuery(
Context parent = Commons.convert(qryCtx);
+ FrameworkConfig frameworkCfg = qryCtx != null ? qryCtx.unwrap(FrameworkConfig.class) : null;
+
+ if (frameworkCfg == null)
+ frameworkCfg = FRAMEWORK_CONFIG;
+
ctx = BaseQueryContext.builder()
.parentContext(parent)
.frameworkConfig(
- Frameworks.newConfigBuilder(FRAMEWORK_CONFIG)
+ Frameworks.newConfigBuilder(frameworkCfg)
.defaultSchema(schema)
.build()
)
diff --git a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/ConverterUtils.java b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/ConverterUtils.java
index 0beb119701174..90c0c2af06bec 100644
--- a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/ConverterUtils.java
+++ b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/ConverterUtils.java
@@ -34,8 +34,10 @@
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.runtime.SqlFunctions;
import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.calcite.util.BuiltInMethod;
import org.apache.calcite.util.Util;
+import org.apache.ignite.internal.processors.query.calcite.util.Commons;
/** */
public class ConverterUtils {
@@ -166,6 +168,21 @@ static List internalTypes(List extends RexNode> operandList) {
return Util.transform(operandList, node -> toInternal(node.getType()));
}
+ /**
+ * Convert {@code operand} to {@code targetType}.
+ *
+ * @param operand The expression to convert
+ * @param targetType Target type
+ * @return A new expression with java type corresponding to {@code targetType}
+ * or original expression if there is no need to convert.
+ */
+ public static Expression convert(Expression operand, RelDataType targetType) {
+ if (SqlTypeUtil.isDecimal(targetType))
+ return convertToDecimal(operand, targetType);
+ else
+ return convert(operand, Commons.typeFactory().getJavaClass(targetType));
+ }
+
/**
* Convert {@code operand} to target type {@code toType}.
*
diff --git a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexImpTable.java b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexImpTable.java
index e52342a432027..433fb9d29637a 100644
--- a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexImpTable.java
+++ b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexImpTable.java
@@ -1963,7 +1963,7 @@ private ParameterExpression genValueStatement(
final Expression convertedCallVal =
noConvert
? callVal
- : ConverterUtils.convert(callVal, returnType);
+ : ConverterUtils.convert(callVal, call.getType());
final Expression valExpression =
Expressions.condition(condition,
diff --git a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexToLixTranslator.java b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexToLixTranslator.java
index 43ed1dbd6380c..e697948cc0639 100644
--- a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexToLixTranslator.java
+++ b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/exec/exp/RexToLixTranslator.java
@@ -62,7 +62,6 @@
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
-import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.util.BuiltInMethod;
@@ -552,11 +551,9 @@ Expression translateCast(
}
break;
}
- if (targetType.getSqlTypeName() == SqlTypeName.DECIMAL)
- convert = ConverterUtils.convertToDecimal(operand, targetType);
if (convert == null)
- convert = ConverterUtils.convert(operand, typeFactory.getJavaClass(targetType));
+ convert = ConverterUtils.convert(operand, targetType);
// Going from anything to CHAR(n) or VARCHAR(n), make sure value is no
// longer than n.
@@ -1073,7 +1070,7 @@ private Result implementCaseWhen(RexCall call) {
list.newName("case_when_value"));
list.add(Expressions.declare(0, valVariable, null));
final List operandList = call.getOperands();
- implementRecursively(this, operandList, valVariable, 0);
+ implementRecursively(this, operandList, valVariable, call.getType(), 0);
final Expression isNullExpression = checkNull(valVariable);
final ParameterExpression isNullVariable =
Expressions.parameter(
@@ -1108,8 +1105,13 @@ private Result implementCaseWhen(RexCall call) {
* }
*
*/
- private void implementRecursively(final RexToLixTranslator currentTranslator,
- final List operandList, final ParameterExpression valueVariable, int pos) {
+ private void implementRecursively(
+ final RexToLixTranslator currentTranslator,
+ final List operandList,
+ final ParameterExpression valueVariable,
+ final RelDataType valueType,
+ int pos
+ ) {
final BlockBuilder curBlockBuilder = currentTranslator.getBlockBuilder();
final List storageTypes = ConverterUtils.internalTypes(operandList);
// [ELSE] clause
@@ -1119,7 +1121,7 @@ private void implementRecursively(final RexToLixTranslator currentTranslator,
curBlockBuilder.add(
Expressions.statement(
Expressions.assign(valueVariable,
- ConverterUtils.convert(res, valueVariable.getType()))));
+ ConverterUtils.convert(res, valueType))));
return;
}
// Condition code: !a_isNull && a_value
@@ -1141,7 +1143,7 @@ private void implementRecursively(final RexToLixTranslator currentTranslator,
ifTrueBlockBuilder.add(
Expressions.statement(
Expressions.assign(valueVariable,
- ConverterUtils.convert(ifTrueRes, valueVariable.getType()))));
+ ConverterUtils.convert(ifTrueRes, valueType))));
final BlockStatement ifTrue = ifTrueBlockBuilder.toBlock();
// There is no [ELSE] clause
if (pos + 1 == operandList.size() - 1) {
@@ -1154,7 +1156,7 @@ private void implementRecursively(final RexToLixTranslator currentTranslator,
new BlockBuilder(true, curBlockBuilder);
final RexToLixTranslator ifFalseTranslator =
currentTranslator.setBlock(ifFalseBlockBuilder);
- implementRecursively(ifFalseTranslator, operandList, valueVariable, pos + 2);
+ implementRecursively(ifFalseTranslator, operandList, valueVariable, valueType, pos + 2);
final BlockStatement ifFalse = ifFalseBlockBuilder.toBlock();
curBlockBuilder.add(
Expressions.ifThenElse(tester, ifTrue, ifFalse));
diff --git a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/schema/CacheIndexImpl.java b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/schema/CacheIndexImpl.java
index 079755aec5e17..06a3e55869bb2 100644
--- a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/schema/CacheIndexImpl.java
+++ b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/schema/CacheIndexImpl.java
@@ -258,8 +258,7 @@ else if (checkExpired)
if (idxKeys.size() < requiredColumns.cardinality() || !ImmutableBitSet.of(idxKeys).contains(requiredColumns))
return false;
- List keyDefs = new ArrayList<>(idx.unwrap(InlineIndex.class).indexDefinition()
- .indexKeyDefinitions().values());
+ List keyDefs = new ArrayList<>(idx.indexDefinition().indexKeyDefinitions().values());
for (InlineIndexKeyType keyType : InlineIndexKeyTypeRegistry.types(keyDefs, new IndexKeyTypeSettings())) {
// Skip variable length keys and java objects (see comments about these limitations in IndexScan class).
diff --git a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/type/IgniteTypeSystem.java b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/type/IgniteTypeSystem.java
index dcde15bca89b3..a5a9594e20b82 100644
--- a/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/type/IgniteTypeSystem.java
+++ b/modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/type/IgniteTypeSystem.java
@@ -43,6 +43,16 @@ public class IgniteTypeSystem extends RelDataTypeSystemImpl implements Serializa
return Short.MAX_VALUE;
}
+ /** {@inheritDoc} */
+ @Override public int getDefaultPrecision(SqlTypeName typeName) {
+ // Timestamps internally stored as millis, precision more than 3 is redundant. At the same time,
+ // default Calcite precision 0 causes truncation when converting to TIMESTAMP without specifying precision.
+ if (typeName == SqlTypeName.TIMESTAMP || typeName == SqlTypeName.TIME_WITH_LOCAL_TIME_ZONE)
+ return 3;
+
+ return super.getDefaultPrecision(typeName);
+ }
+
/** {@inheritDoc} */
@Override public RelDataType deriveSumType(RelDataTypeFactory typeFactory, RelDataType argumentType) {
RelDataType sumType;
diff --git a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/QueryChecker.java b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/QueryChecker.java
index 19f12244783f4..2d8f936919d18 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/QueryChecker.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/QueryChecker.java
@@ -28,12 +28,14 @@
import java.util.stream.Collectors;
import java.util.stream.IntStream;
+import org.apache.calcite.tools.FrameworkConfig;
import org.apache.ignite.Ignite;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicCache;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition;
+import org.apache.ignite.internal.processors.query.QueryContext;
import org.apache.ignite.internal.processors.query.QueryEngine;
import org.apache.ignite.internal.processors.query.schema.management.SchemaManager;
import org.apache.ignite.internal.util.typedef.F;
@@ -296,6 +298,9 @@ public static Matcher containsAnyScan(final String schema, final String
/** */
private String exactPlan;
+ /** */
+ private FrameworkConfig frameworkCfg;
+
/** */
public QueryChecker(String qry) {
this.qry = qry;
@@ -322,6 +327,13 @@ public QueryChecker withParams(Object... params) {
return this;
}
+ /** */
+ public QueryChecker withFrameworkConfig(FrameworkConfig frameworkCfg) {
+ this.frameworkCfg = frameworkCfg;
+
+ return this;
+ }
+
/** */
public QueryChecker returns(Object... res) {
if (expectedResult == null)
@@ -370,8 +382,10 @@ public void check() {
// Check plan.
QueryEngine engine = getEngine();
+ QueryContext ctx = frameworkCfg != null ? QueryContext.of(frameworkCfg) : null;
+
List>> explainCursors =
- engine.query(null, "PUBLIC", "EXPLAIN PLAN FOR " + qry, params);
+ engine.query(ctx, "PUBLIC", "EXPLAIN PLAN FOR " + qry, params);
FieldsQueryCursor> explainCursor = explainCursors.get(0);
List> explainRes = explainCursor.getAll();
@@ -387,7 +401,7 @@ public void check() {
// Check result.
List>> cursors =
- engine.query(null, "PUBLIC", qry, params);
+ engine.query(ctx, "PUBLIC", qry, params);
FieldsQueryCursor> cur = cursors.get(0);
diff --git a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DataTypesTest.java b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DataTypesTest.java
index b6dd0aa7b82af..23474698fe533 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DataTypesTest.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DataTypesTest.java
@@ -24,6 +24,8 @@
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableSet;
import org.apache.calcite.runtime.CalciteException;
+import org.apache.calcite.tools.FrameworkConfig;
+import org.apache.calcite.tools.Frameworks;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.configuration.CacheConfiguration;
@@ -31,6 +33,8 @@
import org.apache.ignite.internal.util.typedef.F;
import org.junit.Test;
+import static org.apache.ignite.internal.processors.query.calcite.CalciteQueryProcessor.FRAMEWORK_CONFIG;
+
/**
* Test SQL data types.
*/
@@ -467,6 +471,25 @@ public void testNumericConversion() {
.check();
}
+ /** */
+ @Test
+ public void testFunctionArgsToNumericImplicitConversion() {
+ assertQuery("select decode(?, 0, 0, 1, 1.0)").withParams(0).returns(new BigDecimal("0.0")).check();
+ assertQuery("select decode(?, 0, 0, 1, 1.0)").withParams(1).returns(new BigDecimal("1.0")).check();
+ assertQuery("select decode(?, 0, 0, 1, 1.000)").withParams(0).returns(new BigDecimal("0.000")).check();
+ assertQuery("select decode(?, 0, 0, 1, 1.000)").withParams(1).returns(new BigDecimal("1.000")).check();
+ assertQuery("select decode(?, 0, 0.0, 1, 1.000)").withParams(0).returns(new BigDecimal("0.000")).check();
+ assertQuery("select decode(?, 0, 0.000, 1, 1.0)").withParams(1).returns(new BigDecimal("1.000")).check();
+
+ // With callRewrite==true function COALESCE is rewritten to CASE and CoalesceImplementor can't be checked.
+ FrameworkConfig frameworkCfg = Frameworks.newConfigBuilder(FRAMEWORK_CONFIG)
+ .sqlValidatorConfig(FRAMEWORK_CONFIG.getSqlValidatorConfig().withCallRewrite(false))
+ .build();
+
+ assertQuery("select coalesce(?, 1.000)").withParams(0).withFrameworkConfig(frameworkCfg)
+ .returns(new BigDecimal("0.000")).check();
+ }
+
/** */
@Test
public void testArithmeticOverflow() {
diff --git a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/DateTimeTest.java b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DateTimeTest.java
similarity index 63%
rename from modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/DateTimeTest.java
rename to modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DateTimeTest.java
index bce76202e4085..1335e33aa6e20 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/DateTimeTest.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DateTimeTest.java
@@ -15,39 +15,29 @@
* limitations under the License.
*/
-package org.apache.ignite.internal.processors.query.calcite;
+package org.apache.ignite.internal.processors.query.calcite.integration;
import java.sql.Time;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
-import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.configuration.CacheConfiguration;
-import org.apache.ignite.internal.IgniteEx;
-import org.apache.ignite.internal.processors.query.QueryEngine;
-import org.apache.ignite.internal.processors.query.calcite.util.Commons;
-import org.apache.ignite.testframework.junits.WithSystemProperty;
-import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;
import static java.util.Collections.singletonList;
/** */
-@WithSystemProperty(key = "calcite.debug", value = "true")
-public class DateTimeTest extends GridCommonAbstractTest {
+public class DateTimeTest extends AbstractBasicIntegrationTest {
/** */
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
- /** */
- private static QueryEngine queryEngine;
-
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
- Ignite grid = startGridsMultiThreaded(1);
+ super.beforeTestsStarted();
QueryEntity qryEnt = new QueryEntity();
qryEnt.setKeyFieldName("ID");
@@ -68,7 +58,7 @@ public class DateTimeTest extends GridCommonAbstractTest {
.setQueryEntities(singletonList(qryEnt))
.setSqlSchema("PUBLIC");
- IgniteCache dateTimeCache = grid.createCache(cfg);
+ IgniteCache dateTimeCache = client.createCache(cfg);
dateTimeCache.put(1, new DateTimeEntry(1, javaDate("2020-10-01 12:00:00.000"),
sqlDate("2020-10-01"), sqlTime("12:00:00"), sqlTimestamp("2020-10-01 12:00:00.000")));
@@ -78,16 +68,17 @@ public class DateTimeTest extends GridCommonAbstractTest {
sqlDate("2020-10-20"), sqlTime("13:15:00"), sqlTimestamp("2020-10-20 13:15:00.000")));
dateTimeCache.put(4, new DateTimeEntry(4, javaDate("2020-01-01 22:40:00.000"),
sqlDate("2020-01-01"), sqlTime("22:40:00"), sqlTimestamp("2020-01-01 22:40:00.000")));
+ }
- queryEngine = Commons.lookupComponent(((IgniteEx)grid).context(), QueryEngine.class);
-
- awaitPartitionMapExchange();
+ /** {@inheritDoc} */
+ @Override protected void afterTest() throws Exception {
+ // Don't clean up caches after test.
}
/** */
@Test
public void testQuery1() throws Exception {
- checkQuery("SELECT SQLDATE FROM datetimetable where SQLTIME = '12:00:00'")
+ assertQuery("SELECT SQLDATE FROM datetimetable where SQLTIME = '12:00:00'")
.returns(sqlDate("2020-10-01"))
.check();
}
@@ -95,7 +86,7 @@ public void testQuery1() throws Exception {
/** */
@Test
public void testQuery2() throws Exception {
- checkQuery("SELECT SQLDATE FROM datetimetable where JAVADATE = ?")
+ assertQuery("SELECT SQLDATE FROM datetimetable where JAVADATE = ?")
.withParams(javaDate("2020-12-01 00:10:20.000"))
.returns(sqlDate("2020-12-01"))
.check();
@@ -104,7 +95,7 @@ public void testQuery2() throws Exception {
/** */
@Test
public void testQuery3() throws Exception {
- checkQuery("SELECT SQLDATE FROM datetimetable where JAVADATE = ?")
+ assertQuery("SELECT SQLDATE FROM datetimetable where JAVADATE = ?")
.withParams(sqlTimestamp("2020-12-01 00:10:20.000"))
.returns(sqlDate("2020-12-01"))
.check();
@@ -113,7 +104,7 @@ public void testQuery3() throws Exception {
/** */
@Test
public void testQuery4() throws Exception {
- checkQuery("SELECT MAX(SQLDATE) FROM datetimetable")
+ assertQuery("SELECT MAX(SQLDATE) FROM datetimetable")
.returns(sqlDate("2020-12-01"))
.check();
}
@@ -121,7 +112,7 @@ public void testQuery4() throws Exception {
/** */
@Test
public void testQuery5() throws Exception {
- checkQuery("SELECT MIN(SQLDATE) FROM datetimetable")
+ assertQuery("SELECT MIN(SQLDATE) FROM datetimetable")
.returns(sqlDate("2020-01-01"))
.check();
}
@@ -129,7 +120,7 @@ public void testQuery5() throws Exception {
/** */
@Test
public void testQuery6() throws Exception {
- checkQuery("SELECT JAVADATE FROM datetimetable WHERE SQLTIME = '13:15:00'")
+ assertQuery("SELECT JAVADATE FROM datetimetable WHERE SQLTIME = '13:15:00'")
.returns(javaDate("2020-10-20 13:15:00.000"))
.check();
}
@@ -137,7 +128,7 @@ public void testQuery6() throws Exception {
/** */
@Test
public void testQuery7() throws Exception {
- checkQuery("SELECT t1.JAVADATE, t2.JAVADATE FROM datetimetable t1 " +
+ assertQuery("SELECT t1.JAVADATE, t2.JAVADATE FROM datetimetable t1 " +
"INNER JOIN " +
"(SELECT JAVADATE, CAST(SQLTIMESTAMP AS TIME) AS CASTED_TIME FROM datetimetable) t2 " +
"ON t1.SQLTIME = t2.CASTED_TIME " +
@@ -157,52 +148,52 @@ public void testDstShift() throws Exception {
TimeZone.setDefault(TimeZone.getTimeZone("Europe/Moscow"));
// Time zone change (EET->MSK) 1992-01-19 02:00:00 -> 1992-01-19 03:00:00
- checkQuery("select date '1992-01-19'").returns(sqlDate("1992-01-19")).check();
- checkQuery("select date '1992-01-18' + interval (1) days").returns(sqlDate("1992-01-19")).check();
- checkQuery("select date '1992-01-18' + interval (24) hours").returns(sqlDate("1992-01-19")).check();
- checkQuery("SELECT timestamp '1992-01-18 02:30:00' + interval (25) hours")
+ assertQuery("select date '1992-01-19'").returns(sqlDate("1992-01-19")).check();
+ assertQuery("select date '1992-01-18' + interval (1) days").returns(sqlDate("1992-01-19")).check();
+ assertQuery("select date '1992-01-18' + interval (24) hours").returns(sqlDate("1992-01-19")).check();
+ assertQuery("SELECT timestamp '1992-01-18 02:30:00' + interval (25) hours")
.returns(sqlTimestamp("1992-01-19 03:30:00.000")).check();
- checkQuery("SELECT timestamp '1992-01-18 02:30:00' + interval (23) hours")
+ assertQuery("SELECT timestamp '1992-01-18 02:30:00' + interval (23) hours")
.returns(sqlTimestamp("1992-01-19 01:30:00.000")).check();
- checkQuery("SELECT timestamp '1992-01-18 02:30:00' + interval (24) hours")
+ assertQuery("SELECT timestamp '1992-01-18 02:30:00' + interval (24) hours")
.returns(sqlTimestamp("1992-01-19 02:30:00.000")).check();
// DST started 1992-03-29 02:00:00 -> 1992-03-29 03:00:00
- checkQuery("select date '1992-03-29'").returns(sqlDate("1992-03-29")).check();
- checkQuery("select date '1992-03-28' + interval (1) days").returns(sqlDate("1992-03-29")).check();
- checkQuery("select date '1992-03-28' + interval (24) hours").returns(sqlDate("1992-03-29")).check();
- checkQuery("SELECT timestamp '1992-03-28 02:30:00' + interval (25) hours")
+ assertQuery("select date '1992-03-29'").returns(sqlDate("1992-03-29")).check();
+ assertQuery("select date '1992-03-28' + interval (1) days").returns(sqlDate("1992-03-29")).check();
+ assertQuery("select date '1992-03-28' + interval (24) hours").returns(sqlDate("1992-03-29")).check();
+ assertQuery("SELECT timestamp '1992-03-28 02:30:00' + interval (25) hours")
.returns(sqlTimestamp("1992-03-29 03:30:00.000")).check();
- checkQuery("SELECT timestamp '1992-03-28 02:30:00' + interval (23) hours")
+ assertQuery("SELECT timestamp '1992-03-28 02:30:00' + interval (23) hours")
.returns(sqlTimestamp("1992-03-29 01:30:00.000")).check();
- checkQuery("SELECT timestamp '1992-03-28 02:30:00' + interval (24) hours")
+ assertQuery("SELECT timestamp '1992-03-28 02:30:00' + interval (24) hours")
.returns(sqlTimestamp("1992-03-29 02:30:00.000")).check();
// DST ended 1992-09-27 03:00:00 -> 1992-09-27 02:00:00
- checkQuery("select date '1992-09-27'").returns(sqlDate("1992-09-27")).check();
- checkQuery("select date '1992-09-26' + interval (1) days").returns(sqlDate("1992-09-27")).check();
- checkQuery("select date '1992-09-26' + interval (24) hours").returns(sqlDate("1992-09-27")).check();
- checkQuery("SELECT timestamp '1992-09-26 02:30:00' + interval (25) hours")
+ assertQuery("select date '1992-09-27'").returns(sqlDate("1992-09-27")).check();
+ assertQuery("select date '1992-09-26' + interval (1) days").returns(sqlDate("1992-09-27")).check();
+ assertQuery("select date '1992-09-26' + interval (24) hours").returns(sqlDate("1992-09-27")).check();
+ assertQuery("SELECT timestamp '1992-09-26 02:30:00' + interval (25) hours")
.returns(sqlTimestamp("1992-09-27 03:30:00.000")).check();
- checkQuery("SELECT timestamp '1992-09-26 02:30:00' + interval (23) hours")
+ assertQuery("SELECT timestamp '1992-09-26 02:30:00' + interval (23) hours")
.returns(sqlTimestamp("1992-09-27 01:30:00.000")).check();
- checkQuery("SELECT timestamp '1992-09-26 02:30:00' + interval (24) hours")
+ assertQuery("SELECT timestamp '1992-09-26 02:30:00' + interval (24) hours")
.returns(sqlTimestamp("1992-09-27 02:30:00.000")).check();
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
// DST ended 2021-11-07 02:00:00 -> 2021-11-07 01:00:00
- checkQuery("select date '2021-11-07'").returns(sqlDate("2021-11-07")).check();
- checkQuery("select date '2021-11-06' + interval (1) days").returns(sqlDate("2021-11-07")).check();
- checkQuery("select date '2021-11-06' + interval (24) hours").returns(sqlDate("2021-11-07")).check();
- checkQuery("SELECT timestamp '2021-11-06 01:30:00' + interval (25) hours")
+ assertQuery("select date '2021-11-07'").returns(sqlDate("2021-11-07")).check();
+ assertQuery("select date '2021-11-06' + interval (1) days").returns(sqlDate("2021-11-07")).check();
+ assertQuery("select date '2021-11-06' + interval (24) hours").returns(sqlDate("2021-11-07")).check();
+ assertQuery("SELECT timestamp '2021-11-06 01:30:00' + interval (25) hours")
.returns(sqlTimestamp("2021-11-07 02:30:00.000")).check();
// Check string representation here, since after timestamp calculation we have '2021-11-07T01:30:00.000-0800'
// but Timestamp.valueOf method converts '2021-11-07 01:30:00' in 'America/Los_Angeles' time zone to
// '2021-11-07T01:30:00.000-0700' (we pass through '2021-11-07 01:30:00' twice after DST ended).
- checkQuery("SELECT (timestamp '2021-11-06 02:30:00' + interval (23) hours)::varchar")
+ assertQuery("SELECT (timestamp '2021-11-06 02:30:00' + interval (23) hours)::varchar")
.returns("2021-11-07 01:30:00").check();
- checkQuery("SELECT (timestamp '2021-11-06 01:30:00' + interval (24) hours)::varchar")
+ assertQuery("SELECT (timestamp '2021-11-06 01:30:00' + interval (24) hours)::varchar")
.returns("2021-11-07 01:30:00").check();
}
finally {
@@ -210,6 +201,31 @@ public void testDstShift() throws Exception {
}
}
+ /** */
+ @Test
+ public void testDateTimeCast() throws Exception {
+ assertQuery("SELECT CAST('2021-01-01 01:02:03.456' AS TIMESTAMP)")
+ .returns(sqlTimestamp("2021-01-01 01:02:03.456")).check();
+
+ assertQuery("SELECT CAST('2021-01-01 01:02:03.0' AS TIMESTAMP)")
+ .returns(sqlTimestamp("2021-01-01 01:02:03")).check();
+
+ assertQuery("SELECT CAST('2021-01-01 01:02:03' AS TIMESTAMP)")
+ .returns(sqlTimestamp("2021-01-01 01:02:03")).check();
+
+ assertQuery("SELECT CAST('2021-01-01 01:02:03.456' AS TIMESTAMP(0))")
+ .returns(sqlTimestamp("2021-01-01 01:02:03")).check();
+
+ assertQuery("SELECT CAST('2021-01-01 01:02:03.456' AS TIMESTAMP(2))")
+ .returns(sqlTimestamp("2021-01-01 01:02:03.45")).check();
+
+ assertQuery("SELECT CAST('2021-01-01' AS DATE)")
+ .returns(sqlDate("2021-01-01")).check();
+
+ assertQuery("SELECT CAST('01:02:03' AS TIME)")
+ .returns(sqlTime("01:02:03")).check();
+ }
+
/** */
public static class DateTimeEntry {
/** */
@@ -237,15 +253,6 @@ public DateTimeEntry(long id, Date javaDate, java.sql.Date sqlDate, Time sqlTime
}
}
- /** */
- private QueryChecker checkQuery(String qry) {
- return new QueryChecker(qry) {
- @Override protected QueryEngine getEngine() {
- return queryEngine;
- }
- };
- }
-
/** */
private Date javaDate(String str) throws Exception {
return DATE_FORMAT.parse(str);
diff --git a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DynamicParametersIntegrationTest.java b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DynamicParametersIntegrationTest.java
index ae35a0d51f034..89fba933b5bdc 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DynamicParametersIntegrationTest.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/DynamicParametersIntegrationTest.java
@@ -52,7 +52,7 @@ public void testMetadataTypesForDynamicParameters() {
);
List types = F.asList("VARCHAR", "DECIMAL(32767, 0)", "INTEGER", "BIGINT", "REAL", "DOUBLE",
- "UUID", "INTERVAL DAY TO SECOND", "DATE", "TIMESTAMP(0)", "TIME(0)", "INTERVAL YEAR TO MONTH");
+ "UUID", "INTERVAL DAY TO SECOND", "DATE", "TIMESTAMP(3)", "TIME(0)", "INTERVAL YEAR TO MONTH");
for (int i = 0; i < values.size(); i++) {
assertQuery("SELECT typeof(?)").withParams(values.get(i)).returns(types.get(i)).check();
diff --git a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/QueryMetadataIntegrationTest.java b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/QueryMetadataIntegrationTest.java
index c3e5ebbc6c819..36ee4dc647a06 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/QueryMetadataIntegrationTest.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/QueryMetadataIntegrationTest.java
@@ -52,16 +52,16 @@ public void testJoin() throws Exception {
.add("PUBLIC", "TBL1", BigDecimal.class, "ID", 10, 2, true)
.add("PUBLIC", "TBL1", String.class, "VAL", true)
.add("PUBLIC", "TBL1", Long.class, "VAL2", 19, 0, true)
- .add("PUBLIC", "TBL1", java.sql.Timestamp.class, "TS", 0, SCALE_NOT_SPECIFIED, true)
+ .add("PUBLIC", "TBL1", java.sql.Timestamp.class, "TS", 3, SCALE_NOT_SPECIFIED, true)
.add("PUBLIC", "TBL2", BigDecimal.class, "ID", 10, 2, false)
.add("PUBLIC", "TBL2", String.class, "VAL", true)
.add("PUBLIC", "TBL2", Long.class, "VAL2", 19, 0, true)
- .add("PUBLIC", "TBL2", java.sql.Timestamp.class, "TS", 0, SCALE_NOT_SPECIFIED, true),
+ .add("PUBLIC", "TBL2", java.sql.Timestamp.class, "TS", 3, SCALE_NOT_SPECIFIED, true),
builder -> builder
.add(BigDecimal.class, 10, 2)
.add(BigDecimal.class, 10, 2)
.add(String.class)
- .add(java.sql.Timestamp.class, 0, SCALE_NOT_SPECIFIED)
+ .add(java.sql.Timestamp.class, 3, SCALE_NOT_SPECIFIED)
).check();
}
diff --git a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/TableDdlIntegrationTest.java b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/TableDdlIntegrationTest.java
index ccf7b0b5ae06c..98ce8275f2111 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/TableDdlIntegrationTest.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/integration/TableDdlIntegrationTest.java
@@ -117,7 +117,7 @@ public void createTableDifferentDataTypes() {
"'test', " +
"date '2021-01-01', " +
"time '12:34:56', " +
- "timestamp '2021-01-01 12:34:56', " +
+ "timestamp '2021-01-01 12:34:56.789', " +
"1, " +
"9876543210, " +
"3, " +
@@ -138,7 +138,7 @@ public void createTableDifferentDataTypes() {
assertEquals("test", row.get(1));
assertEquals(Date.valueOf("2021-01-01"), row.get(2));
assertEquals(Time.valueOf("12:34:56"), row.get(3));
- assertEquals(Timestamp.valueOf("2021-01-01 12:34:56"), row.get(4));
+ assertEquals(Timestamp.valueOf("2021-01-01 12:34:56.789"), row.get(4));
assertEquals(1, row.get(5));
assertEquals(9876543210L, row.get(6));
assertEquals((short)3, row.get(7));
diff --git a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/planner/TestTable.java b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/planner/TestTable.java
index f6632a35284ec..c0220b0802828 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/planner/TestTable.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/internal/processors/query/calcite/planner/TestTable.java
@@ -49,8 +49,8 @@
import org.apache.ignite.internal.cache.query.index.SortOrder;
import org.apache.ignite.internal.cache.query.index.sorted.IndexKeyDefinition;
import org.apache.ignite.internal.cache.query.index.sorted.IndexKeyType;
+import org.apache.ignite.internal.cache.query.index.sorted.client.ClientIndex;
import org.apache.ignite.internal.cache.query.index.sorted.client.ClientIndexDefinition;
-import org.apache.ignite.internal.cache.query.index.sorted.client.ClientInlineIndex;
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.internal.processors.query.calcite.exec.ExecutionContext;
import org.apache.ignite.internal.processors.query.calcite.metadata.ColocationGroup;
@@ -246,12 +246,10 @@ public TestTable addIndex(RelCollation collation, String name) {
IndexDefinition idxDef = new ClientIndexDefinition(
new IndexName(QueryUtils.createTableCacheName(DEFAULT_SCHEMA, this.name), DEFAULT_SCHEMA, this.name, name),
- keyDefs,
- -1,
- -1
+ keyDefs
);
- indexes.put(name, new CacheIndexImpl(collation, name, new ClientInlineIndex(idxDef, -1), this));
+ indexes.put(name, new CacheIndexImpl(collation, name, new ClientIndex(idxDef), this));
return this;
}
diff --git a/modules/calcite/src/test/java/org/apache/ignite/testsuites/IntegrationTestSuite.java b/modules/calcite/src/test/java/org/apache/ignite/testsuites/IntegrationTestSuite.java
index dd23c284ac138..84dcc6fdb6133 100644
--- a/modules/calcite/src/test/java/org/apache/ignite/testsuites/IntegrationTestSuite.java
+++ b/modules/calcite/src/test/java/org/apache/ignite/testsuites/IntegrationTestSuite.java
@@ -20,7 +20,6 @@
import org.apache.ignite.internal.processors.cache.DdlTransactionCalciteSelfTest;
import org.apache.ignite.internal.processors.query.calcite.CalciteQueryProcessorTest;
import org.apache.ignite.internal.processors.query.calcite.CancelTest;
-import org.apache.ignite.internal.processors.query.calcite.DateTimeTest;
import org.apache.ignite.internal.processors.query.calcite.IndexWithSameNameCalciteTest;
import org.apache.ignite.internal.processors.query.calcite.SqlFieldsQueryUsageTest;
import org.apache.ignite.internal.processors.query.calcite.UnstableTopologyTest;
@@ -30,6 +29,7 @@
import org.apache.ignite.internal.processors.query.calcite.integration.CalciteErrorHandlilngIntegrationTest;
import org.apache.ignite.internal.processors.query.calcite.integration.CorrelatesIntegrationTest;
import org.apache.ignite.internal.processors.query.calcite.integration.DataTypesTest;
+import org.apache.ignite.internal.processors.query.calcite.integration.DateTimeTest;
import org.apache.ignite.internal.processors.query.calcite.integration.DynamicParametersIntegrationTest;
import org.apache.ignite.internal.processors.query.calcite.integration.ExpiredEntriesIntegrationTest;
import org.apache.ignite.internal.processors.query.calcite.integration.FunctionsTest;
diff --git a/modules/calcite/src/test/sql/types/timestamp/test_timestamp_ms.test_ignore b/modules/calcite/src/test/sql/types/timestamp/test_timestamp_ms.test
similarity index 69%
rename from modules/calcite/src/test/sql/types/timestamp/test_timestamp_ms.test_ignore
rename to modules/calcite/src/test/sql/types/timestamp/test_timestamp_ms.test
index 17711eb7b0509..3e0f454054bc1 100644
--- a/modules/calcite/src/test/sql/types/timestamp/test_timestamp_ms.test_ignore
+++ b/modules/calcite/src/test/sql/types/timestamp/test_timestamp_ms.test
@@ -1,12 +1,11 @@
# name: test/sql/types/timestamp/test_timestamp_ms.test
# description: Test milliseconds with timestamp
# group: [timestamp]
-# Ignore https://issues.apache.org/jira/browse/IGNITE-15623
query TT
-SELECT CAST('2001-04-20 14:42:11.123' AS TIMESTAMP) a, CAST('2001-04-20 14:42:11.0' AS TIMESTAMP) b;
+SELECT CAST('2001-04-20 14:42:11.123' AS TIMESTAMP) a, CAST('2001-04-20 14:42:11.00' AS TIMESTAMP) b;
----
-2001-04-20 14:42:11.123 2001-04-20 14:42:11
+2001-04-20 14:42:11.123 2001-04-20 14:42:11.0
# many ms
query I
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryMarshaller.java b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryMarshaller.java
index 7db76c1ddabd9..21251db8572fd 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryMarshaller.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryMarshaller.java
@@ -97,7 +97,7 @@ public void setBinaryContext(BinaryContext ctx, IgniteConfiguration cfg) {
}
/** {@inheritDoc} */
- @Override protected T unmarshal0(byte[] bytes, @Nullable ClassLoader clsLdr) throws IgniteCheckedException {
+ @Override protected T unmarshal0(byte[] bytes, @Nullable ClassLoader clsLdr) {
return impl.deserialize(bytes, clsLdr);
}
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/Index.java b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/Index.java
index 955bc03f651f3..45984b40c25f3 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/Index.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/Index.java
@@ -66,4 +66,9 @@ public void onUpdate(@Nullable CacheDataRow oldRow, @Nullable CacheDataRow newRo
* @param softDelete if {@code true} then perform logical deletion.
*/
public void destroy(boolean softDelete);
+
+ /**
+ * @return Index definition.
+ */
+ public IndexDefinition indexDefinition();
}
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/SortedSegmentedIndex.java b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/SortedSegmentedIndex.java
index 1db578257d306..92738a48b8ff8 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/SortedSegmentedIndex.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/SortedSegmentedIndex.java
@@ -123,4 +123,7 @@ public GridCursor findLast(int segment, IndexQueryContext qryCtx)
* @return amount of index tree segments.
*/
public int segmentsCount();
+
+ /** {@inheritDoc} */
+ @Override public SortedIndexDefinition indexDefinition();
}
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndex.java b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndex.java
new file mode 100644
index 0000000000000..db34e3ba9470c
--- /dev/null
+++ b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndex.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.cache.query.index.sorted.client;
+
+import java.util.UUID;
+import org.apache.ignite.internal.cache.query.index.Index;
+import org.apache.ignite.internal.cache.query.index.IndexDefinition;
+
+/**
+ * We need indexes on non-affinity nodes. This index does not contain any data.
+ */
+public class ClientIndex extends AbstractClientIndex implements Index {
+ /** Index id. */
+ private final UUID id = UUID.randomUUID();
+
+ /** Index definition. */
+ private final IndexDefinition def;
+
+ /** */
+ public ClientIndex(IndexDefinition def) {
+ this.def = def;
+ }
+
+ /** {@inheritDoc} */
+ @Override public UUID id() {
+ return id;
+ }
+
+ /** {@inheritDoc} */
+ @Override public String name() {
+ return def.idxName().idxName();
+ }
+
+ /** {@inheritDoc} */
+ @Override public IndexDefinition indexDefinition() {
+ return def;
+ }
+}
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexDefinition.java b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexDefinition.java
index 36b8d4d0d7e92..e8fa6066ae019 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexDefinition.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexDefinition.java
@@ -26,12 +26,6 @@
* Define index for filtered or client node.
*/
public class ClientIndexDefinition implements IndexDefinition {
- /** */
- private final int cfgInlineSize;
-
- /** */
- private final int maxInlineSize;
-
/** */
private final IndexName idxName;
@@ -39,28 +33,11 @@ public class ClientIndexDefinition implements IndexDefinition {
private final LinkedHashMap keyDefs;
/** */
- public ClientIndexDefinition(
- IndexName idxName,
- LinkedHashMap keyDefs,
- int cfgInlineSize,
- int maxInlineSize
- ) {
+ public ClientIndexDefinition(IndexName idxName, LinkedHashMap keyDefs) {
this.idxName = idxName;
- this.cfgInlineSize = cfgInlineSize;
- this.maxInlineSize = maxInlineSize;
this.keyDefs = keyDefs;
}
- /** */
- public int getCfgInlineSize() {
- return cfgInlineSize;
- }
-
- /** */
- public int getMaxInlineSize() {
- return maxInlineSize;
- }
-
/** {@inheritDoc} */
@Override public IndexName idxName() {
return idxName;
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexFactory.java b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexFactory.java
index 6f63445550b0d..9d9c8bbc09de6 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexFactory.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientIndexFactory.java
@@ -17,52 +17,17 @@
package org.apache.ignite.internal.cache.query.index.sorted.client;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.cache.query.index.Index;
import org.apache.ignite.internal.cache.query.index.IndexDefinition;
import org.apache.ignite.internal.cache.query.index.IndexFactory;
-import org.apache.ignite.internal.cache.query.index.sorted.IndexKeyDefinition;
-import org.apache.ignite.internal.cache.query.index.sorted.IndexKeyTypeSettings;
-import org.apache.ignite.internal.cache.query.index.sorted.inline.InlineIndexKeyType;
-import org.apache.ignite.internal.cache.query.index.sorted.inline.InlineIndexKeyTypeRegistry;
-import org.apache.ignite.internal.cache.query.index.sorted.inline.InlineIndexTree;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
/**
* Factory for client index.
*/
public class ClientIndexFactory implements IndexFactory {
- /** Dummy key types. */
- private static final IndexKeyTypeSettings DUMMY_SETTINGS = new IndexKeyTypeSettings();
-
- /** Logger. */
- private final IgniteLogger log;
-
- /** */
- public ClientIndexFactory(IgniteLogger log) {
- this.log = log;
- }
-
/** {@inheritDoc} */
- @Override public Index createIndex(GridCacheContext, ?> cctx, IndexDefinition definition) {
- ClientIndexDefinition def = (ClientIndexDefinition)definition;
-
- LinkedHashMap keyDefs = definition.indexKeyDefinitions();
-
- List keyTypes = InlineIndexKeyTypeRegistry.types(keyDefs.values(), DUMMY_SETTINGS);
-
- int inlineSize = InlineIndexTree.computeInlineSize(
- definition.idxName().fullName(),
- keyTypes,
- new ArrayList<>(keyDefs.values()),
- def.getCfgInlineSize(),
- def.getMaxInlineSize(),
- log
- );
-
- return new ClientInlineIndex(def, inlineSize);
+ @Override public Index createIndex(GridCacheContext, ?> cctx, IndexDefinition def) {
+ return new ClientIndex(def);
}
}
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientInlineIndex.java b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientInlineIndex.java
deleted file mode 100644
index 1f04dcab39781..0000000000000
--- a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/client/ClientInlineIndex.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ignite.internal.cache.query.index.sorted.client;
-
-import java.util.UUID;
-import org.apache.ignite.internal.cache.query.index.IndexDefinition;
-import org.apache.ignite.internal.cache.query.index.sorted.IndexRow;
-import org.apache.ignite.internal.cache.query.index.sorted.inline.IndexQueryContext;
-import org.apache.ignite.internal.cache.query.index.sorted.inline.InlineIndex;
-import org.apache.ignite.internal.cache.query.index.sorted.inline.InlineIndexTree;
-import org.apache.ignite.internal.util.lang.GridCursor;
-
-/**
- * We need indexes on non-affinity nodes. This index does not contain any data.
- */
-public class ClientInlineIndex extends AbstractClientIndex implements InlineIndex {
- /** */
- private final int inlineSize;
-
- /** Index id. */
- private final UUID id = UUID.randomUUID();
-
- /** Index definition. */
- private final IndexDefinition def;
-
- /** */
- public ClientInlineIndex(IndexDefinition def, int inlineSize) {
- this.def = def;
- this.inlineSize = inlineSize;
- }
-
- /** {@inheritDoc} */
- @Override public int inlineSize() {
- return inlineSize;
- }
-
- /** {@inheritDoc} */
- @Override public boolean created() {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public InlineIndexTree segment(int segment) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public GridCursor find(
- IndexRow lower,
- IndexRow upper,
- boolean lowIncl,
- boolean upIncl,
- int segment,
- IndexQueryContext qryCtx
- ) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public GridCursor find(
- IndexRow lower,
- IndexRow upper,
- boolean lowIncl,
- boolean upIncl,
- IndexQueryContext qryCtx
- ) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public GridCursor findFirst(int segment, IndexQueryContext qryCtx) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public GridCursor findLast(int segment, IndexQueryContext qryCtx) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public GridCursor findFirstOrLast(IndexQueryContext qryCtx, boolean first) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public long count(int segment) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public long totalCount() {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public long count(int segment, IndexQueryContext qryCtx) {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public int segmentsCount() {
- throw unsupported();
- }
-
- /** {@inheritDoc} */
- @Override public UUID id() {
- return id;
- }
-
- /** {@inheritDoc} */
- @Override public String name() {
- return def.idxName().idxName();
- }
-
- /** {@inheritDoc} */
- @Override public IndexDefinition indexDefinition() {
- return def;
- }
-}
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/inline/InlineIndex.java b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/inline/InlineIndex.java
index ffff30f57a89c..35a198e1de597 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/inline/InlineIndex.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/cache/query/index/sorted/inline/InlineIndex.java
@@ -17,7 +17,6 @@
package org.apache.ignite.internal.cache.query.index.sorted.inline;
-import org.apache.ignite.internal.cache.query.index.IndexDefinition;
import org.apache.ignite.internal.cache.query.index.sorted.SortedSegmentedIndex;
/**
@@ -39,9 +38,4 @@ public interface InlineIndex extends SortedSegmentedIndex {
* @return Tree segment for specified number.
*/
public InlineIndexTree segment(int segment);
-
- /**
- * @return Index definition.
- */
- public IndexDefinition indexDefinition();
}
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/GridDeploymentPerVersionStore.java b/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/GridDeploymentPerVersionStore.java
index 5c2e84112bc4f..7c6c3246fd5ac 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/GridDeploymentPerVersionStore.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/GridDeploymentPerVersionStore.java
@@ -329,6 +329,26 @@ else if (log.isDebugEnabled())
if (isDeadClassLoader(meta))
return null;
+ boolean skipSearchDeployment = false;
+
+ // Check already exist deployment.
+ if (meta.deploymentMode() == SHARED) {
+ Collection created = getDeployments();
+
+ for (GridDeployment dep0 : created) {
+ // hot redeploy from same node
+ if (dep0.participants().containsKey(meta.senderNodeId()) || dep0.undeployed())
+ continue;
+
+ IgniteBiTuple, Throwable> cls = dep0.deployedClass(meta.className(), meta.alias());
+
+ if (cls.getKey() != null && cls.getValue() == null) {
+ ((SharedDeployment)dep0).addParticipant(meta.senderNodeId(), meta.classLoaderId());
+ skipSearchDeployment = true;
+ }
+ }
+ }
+
if (!F.isEmpty(meta.participants())) {
Map participants = new LinkedHashMap<>();
@@ -376,7 +396,8 @@ else if (ctx.discovery().node(meta.senderNodeId()) == null) {
return null;
}
- dep = (SharedDeployment)searchDeploymentCache(meta);
+ if (!skipSearchDeployment)
+ dep = (SharedDeployment)searchDeploymentCache(meta);
if (dep == null) {
List deps = cache.get(meta.userVersion());
@@ -1243,8 +1264,6 @@ void onRemoved() {
/** {@inheritDoc} */
@Override public void onDeployed(Class> cls) {
- assert !Thread.holdsLock(mux);
-
boolean isTask = isTask(cls);
String msg = (isTask ? "Task" : "Class") + " was deployed in SHARED or CONTINUOUS mode: " + cls;
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/P2PClassLoadingIssues.java b/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/P2PClassLoadingIssues.java
index efdeb0ae2c1d3..ec9cc81d9cba2 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/P2PClassLoadingIssues.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/managers/deployment/P2PClassLoadingIssues.java
@@ -38,6 +38,14 @@ public static T rethrowDisarmedP2PClassLoadingFailure(NoClassDefFoundError e
throw error;
}
+ /** Wraps specific exception.
+ *
+ * @param e Exception to be wrapped.
+ */
+ public static P2PClassNotFoundException wrapWithP2PFailure(NoClassDefFoundError e) {
+ return new P2PClassNotFoundException("P2P class loading failed", e);
+ }
+
/**
* Returns @{code true} if the given Throwable is an error caused by a P2P class-loading failure.
*
diff --git a/modules/core/src/main/java/org/apache/ignite/internal/processors/datastreamer/DataStreamerUpdateJob.java b/modules/core/src/main/java/org/apache/ignite/internal/processors/datastreamer/DataStreamerUpdateJob.java
index ffe5b944d6f8e..b4a9bd0133b8f 100644
--- a/modules/core/src/main/java/org/apache/ignite/internal/processors/datastreamer/DataStreamerUpdateJob.java
+++ b/modules/core/src/main/java/org/apache/ignite/internal/processors/datastreamer/DataStreamerUpdateJob.java
@@ -21,7 +21,6 @@
import java.util.Map;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.GridKernalContext;
-import org.apache.ignite.internal.managers.deployment.P2PClassLoadingIssues;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
@@ -33,6 +32,8 @@
import org.apache.ignite.stream.StreamReceiver;
import org.jetbrains.annotations.Nullable;
+import static org.apache.ignite.internal.managers.deployment.P2PClassLoadingIssues.wrapWithP2PFailure;
+
/**
* Job to put entries to cache on affinity node.
*/
@@ -146,7 +147,7 @@ class DataStreamerUpdateJob implements GridPlainCallable