Fix Redaction #1239

Open · wants to merge 4 commits into master
RedactionLeafReader.java
@@ -1,6 +1,7 @@
package com.slack.astra.logstore.search.fieldRedaction;

import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadata;
import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadataStore;
import java.io.IOException;
import java.util.HashMap;
import org.apache.lucene.codecs.StoredFieldsReader;
@@ -16,11 +17,13 @@
*/
class RedactionLeafReader extends SequentialStoredFieldsLeafReader {
private final HashMap<String, FieldRedactionMetadata> fieldRedactionsMap;
private final FieldRedactionMetadataStore fieldRedactionMetadataStore;

public RedactionLeafReader(
LeafReader in, HashMap<String, FieldRedactionMetadata> fieldRedactionsMap) {
LeafReader in, FieldRedactionMetadataStore fieldRedactionMetadataStore) {
super(in);
this.fieldRedactionsMap = fieldRedactionsMap;
this.fieldRedactionMetadataStore = fieldRedactionMetadataStore;
this.fieldRedactionsMap = new HashMap<>();
}

@Override
@@ -36,12 +39,14 @@ public StoredFields storedFields() throws IOException {
// RedactionStoredFieldVisitor can be called here or in the RedactedFieldReader
@Override
public void document(int docID, StoredFieldVisitor visitor) throws IOException {
getRedactedFields();
visitor = new RedactionStoredFieldVisitor(visitor, fieldRedactionsMap);
in.document(docID, visitor);
}

@Override
protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) {
getRedactedFields();
return new RedactedFieldReader(reader, fieldRedactionsMap);
}

@@ -59,4 +64,18 @@ public CacheHelper getReaderCacheHelper() {
protected void doClose() throws IOException {
super.doClose();
}

// Copy the ZK store contents into a hashmap for low-cost lookups of redacted fields.
// Because we do not have a listener on the ZK store, the map is rebuilt per search,
// which happens whenever document() or doGetSequentialStoredFieldsReader() is called.
protected void getRedactedFields() {
if (this.fieldRedactionMetadataStore != null) {
fieldRedactionMetadataStore
.listSync()
.forEach(
redaction -> {
fieldRedactionsMap.put(redaction.getName(), redaction);
});
}
}
}
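For context, the refreshed fieldRedactionsMap is consumed by RedactionStoredFieldVisitor, which is not shown in this diff. Below is a minimal, hypothetical sketch of what a delegating redaction visitor could look like, assuming Lucene 9 StoredFieldVisitor signatures; the class name SkipRedactedFieldsVisitor and its skip-the-field policy are illustrative assumptions, not this PR's implementation (the real visitor may rewrite values instead of dropping them).

// Sketch only, not part of this PR: a visitor that skips fields present in the
// redaction map and forwards everything else to the wrapped visitor.
import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadata;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.StoredFieldVisitor;

class SkipRedactedFieldsVisitor extends StoredFieldVisitor {
  private final StoredFieldVisitor delegate;
  private final Map<String, FieldRedactionMetadata> redactions;

  SkipRedactedFieldsVisitor(
      StoredFieldVisitor delegate, Map<String, FieldRedactionMetadata> redactions) {
    this.delegate = delegate;
    this.redactions = redactions;
  }

  @Override
  public Status needsField(FieldInfo fieldInfo) throws IOException {
    // Hypothetical policy: do not load redacted fields at all.
    return redactions.containsKey(fieldInfo.name) ? Status.NO : delegate.needsField(fieldInfo);
  }

  @Override
  public void stringField(FieldInfo fieldInfo, String value) throws IOException {
    delegate.stringField(fieldInfo, value); // only reached for non-redacted fields
  }

  @Override
  public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException {
    delegate.binaryField(fieldInfo, value);
    // intField, longField, floatField and doubleField would forward the same way.
  }
}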
RedactionSubReaderWrapper.java
@@ -1,8 +1,6 @@
package com.slack.astra.logstore.search.fieldRedaction;

import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadata;
import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadataStore;
import java.util.HashMap;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.LeafReader;

@@ -11,22 +9,14 @@
* reader, and creates a RedactionLeafReader.
*/
class RedactionSubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper {
private final HashMap<String, FieldRedactionMetadata> fieldRedactionsMap;
private final FieldRedactionMetadataStore fieldRedactionMetadataStore;

public RedactionSubReaderWrapper(FieldRedactionMetadataStore fieldRedactionMetadataStore) {
this.fieldRedactionsMap = new HashMap<>();
if (fieldRedactionMetadataStore != null) {
fieldRedactionMetadataStore
.listSync()
.forEach(
redaction -> {
fieldRedactionsMap.put(redaction.getName(), redaction);
});
}
this.fieldRedactionMetadataStore = fieldRedactionMetadataStore;
}

@Override
public LeafReader wrap(LeafReader reader) {
return new RedactionLeafReader(reader, this.fieldRedactionsMap);
return new RedactionLeafReader(reader, this.fieldRedactionMetadataStore);
}
}
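RedactionSubReaderWrapper only takes effect once it is attached to a DirectoryReader. As a minimal sketch of the standard Lucene wiring (the class RedactionDirectoryReaderSketch below is hypothetical, not part of this PR), a FilterDirectoryReader applies the SubReaderWrapper to every leaf and re-applies it when the reader is reopened:

// Sketch only, not part of this PR: attaching RedactionSubReaderWrapper through
// Lucene's FilterDirectoryReader so every leaf is wrapped in a RedactionLeafReader.
package com.slack.astra.logstore.search.fieldRedaction;

import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadataStore;
import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;

class RedactionDirectoryReaderSketch extends FilterDirectoryReader {
  private final FieldRedactionMetadataStore store;

  RedactionDirectoryReaderSketch(DirectoryReader in, FieldRedactionMetadataStore store)
      throws IOException {
    super(in, new RedactionSubReaderWrapper(store)); // wrap(...) runs for each leaf reader
    this.store = store;
  }

  @Override
  protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
    // Re-wrap on reopen so newly flushed segments are also covered by redaction.
    return new RedactionDirectoryReaderSketch(in, store);
  }

  @Override
  public CacheHelper getReaderCacheHelper() {
    return null; // no reader-level caching in this sketch
  }
}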
@@ -83,6 +83,7 @@ public void setup() throws Exception {
.setZkSessionTimeoutMs(Integer.MAX_VALUE)
.setZkConnectionTimeoutMs(Integer.MAX_VALUE)
.setSleepBetweenRetriesMs(1000)
.setZkCacheInitTimeoutMs(1000)
.build();

MeterRegistry meterRegistry = new SimpleMeterRegistry();
@@ -105,14 +106,6 @@ public void testRedactionWithIncludeFilters() throws Exception {
long start = Instant.now().minus(1, ChronoUnit.DAYS).toEpochMilli();
long end = Instant.now().plus(2, ChronoUnit.DAYS).toEpochMilli();

fieldRedactionMetadataStore.createSync(
new FieldRedactionMetadata(redactionName, fieldName, start, end));

await()
.until(
() ->
AstraMetadataTestUtils.listSyncUncached(fieldRedactionMetadataStore).size() == 1);

// search
TemporaryLogStoreAndSearcherExtension featureFlagEnabledStrictLogStore =
new TemporaryLogStoreAndSearcherExtension(true, fieldRedactionMetadataStore);
@@ -138,6 +131,18 @@ public void testRedactionWithIncludeFilters() throws Exception {
.putIncludeFields("message", true)
.build();

// Add the redaction between indexing the log and searching it, to verify that the
// redaction map gets refreshed per search.
// A previous change passed this test when the redaction was added before the
// DirectoryReader was created, even though redaction still did not work.
fieldRedactionMetadataStore.createSync(
new FieldRedactionMetadata(redactionName, fieldName, start, end));

await()
.until(
() ->
AstraMetadataTestUtils.listSyncUncached(fieldRedactionMetadataStore).size() == 1);

List<LogMessage> messages =
featureFlagEnabledStrictLogStore.logSearcher.search(
TEST_DATASET_NAME,
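Restated outside the test for clarity, the ordering the updated test exercises is: index the document, then create the redaction, wait until the store reflects it, and only then search. The sketch below assumes listSync() returns a sized collection; the helper class, method name, and literal redaction values are illustrative only.

// Sketch only, not part of this PR: the write, wait, then search ordering the test relies on.
import static org.awaitility.Awaitility.await;

import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadata;
import com.slack.astra.metadata.fieldredaction.FieldRedactionMetadataStore;

class RedactionTestOrderingSketch {
  static void addRedactionThenSearch(FieldRedactionMetadataStore store, Runnable search) {
    // The redaction is created only after the document has already been indexed.
    store.createSync(new FieldRedactionMetadata("test_redaction", "message", 0L, Long.MAX_VALUE));
    // Wait for the store to reflect the write before searching.
    await().until(() -> store.listSync().size() == 1);
    // The search triggers getRedactedFields(), which rebuilds the redaction map.
    search.run();
  }
}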