Fix kahadb test failing with JDK 21 due to deprecated SecurityManager
gurpartap0306 committed Nov 30, 2024
1 parent bd91d97 commit 67528b1
Showing 7 changed files with 135 additions and 15 deletions.
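
For context: JournalArchiveTest previously installed a custom SecurityManager and vetoed writes into the journal archive directory from checkWrite() to provoke an archive failure. SecurityManager is deprecated for removal (JEP 411) and installing one at run time is disallowed by default on recent JDKs, so that approach breaks on JDK 21. This commit instead makes DataFile creation in the journal pluggable through a new DataFileFactory, and the test supplies a factory whose DataFile.move() throws. A minimal sketch of the new wiring (only the setters shown in this diff; the surrounding setup is illustrative):

    KahaDBPersistenceAdapter adapter = new KahaDBPersistenceAdapter();
    adapter.setArchiveDataLogs(true);                          // move finished data files to the archive directory
    adapter.setDataFileFactory(new DefaultDataFileFactory());  // or a custom factory whose DataFiles fail on move()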
@@ -44,6 +44,7 @@
import org.apache.activemq.store.kahadb.data.KahaLocalTransactionId;
import org.apache.activemq.store.kahadb.data.KahaTransactionInfo;
import org.apache.activemq.store.kahadb.data.KahaXATransactionId;
import org.apache.activemq.store.kahadb.disk.journal.DataFileFactory;
import org.apache.activemq.store.kahadb.disk.journal.Journal.JournalDiskSyncStrategy;
import org.apache.activemq.usage.SystemUsage;
import org.apache.activemq.util.ServiceStopper;
@@ -597,6 +598,10 @@ public void setArchiveDataLogs(boolean archiveDataLogs) {
letter.setArchiveDataLogs(archiveDataLogs);
}

public void setDataFileFactory(DataFileFactory dataFileFactory) {
letter.setDataFileFactory(dataFileFactory);
}

public File getDirectoryArchive() {
return letter.getDirectoryArchive();
}
@@ -93,6 +93,7 @@
import org.apache.activemq.store.kahadb.disk.index.BTreeVisitor;
import org.apache.activemq.store.kahadb.disk.index.ListIndex;
import org.apache.activemq.store.kahadb.disk.journal.DataFile;
import org.apache.activemq.store.kahadb.disk.journal.DataFileFactory;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.Journal.JournalDiskSyncStrategy;
import org.apache.activemq.store.kahadb.disk.journal.Location;
@@ -266,6 +267,7 @@ public enum PurgeRecoveredXATransactionStrategy {

protected JournalDiskSyncStrategy journalDiskSyncStrategy = JournalDiskSyncStrategy.ALWAYS;
protected boolean archiveDataLogs;
protected DataFileFactory dataFileFactory;
protected File directoryArchive;
protected AtomicLong journalSize = new AtomicLong(0);
long journalDiskSyncInterval = 1000;
@@ -3421,6 +3423,9 @@ protected Journal createJournal() throws IOException {
IOHelper.mkdirs(getDirectoryArchive());
manager.setDirectoryArchive(getDirectoryArchive());
}
if (getDataFileFactory() != null) {
manager.setDataFileFactory(getDataFileFactory());
}
return manager;
}

@@ -3653,6 +3658,14 @@ public void setArchiveDataLogs(boolean archiveDataLogs) {
this.archiveDataLogs = archiveDataLogs;
}

public DataFileFactory getDataFileFactory() {
return this.dataFileFactory;
}

public void setDataFileFactory(DataFileFactory dataFileFactory) {
this.dataFileFactory = dataFileFactory;
}

/**
* @return the directoryArchive
*/
@@ -0,0 +1,23 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.store.kahadb.disk.journal;

import java.io.File;

public interface DataFileFactory {
DataFile create(File file, int number);
}
@@ -0,0 +1,26 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.store.kahadb.disk.journal;

import java.io.File;

public class DefaultDataFileFactory implements DataFileFactory {
@Override
public DataFile create(File file, int number) {
return new DataFile(file, number);
}
}
@@ -242,6 +242,8 @@ private static byte[] createEofBatchAndLocationRecord() {

protected JournalDiskSyncStrategy journalDiskSyncStrategy = JournalDiskSyncStrategy.ALWAYS;

protected DataFileFactory dataFileFactory = new DefaultDataFileFactory();

public interface DataFileRemovedListener {
void fileRemoved(DataFile datafile);
}
@@ -272,7 +274,7 @@ public boolean accept(File dir, String n) {
String n = file.getName();
String numStr = n.substring(filePrefix.length(), n.length()-fileSuffix.length());
int num = Integer.parseInt(numStr);
DataFile dataFile = new DataFile(file, num);
DataFile dataFile = dataFileFactory.create(file, num);
fileMap.put(dataFile.getDataFileId(), dataFile);
totalLength.addAndGet(dataFile.getLength());
} catch (NumberFormatException e) {
@@ -687,7 +689,7 @@ public void run() {
private DataFile newDataFile() throws IOException {
int nextNum = nextDataFileId++;
File file = getFile(nextNum);
DataFile nextWriteFile = new DataFile(file, nextNum);
DataFile nextWriteFile = dataFileFactory.create(file, nextNum);
preallocateEntireJournalDataFile(nextWriteFile.appendRandomAccessFile());
return nextWriteFile;
}
@@ -697,7 +699,7 @@ public DataFile reserveDataFile() {
synchronized (dataFileIdLock) {
int nextNum = nextDataFileId++;
File file = getFile(nextNum);
DataFile reservedDataFile = new DataFile(file, nextNum);
DataFile reservedDataFile = dataFileFactory.create(file, nextNum);
synchronized (currentDataFile) {
fileMap.put(reservedDataFile.getDataFileId(), reservedDataFile);
fileByFileMap.put(file, reservedDataFile);
@@ -1128,6 +1130,14 @@ public void setCheckForCorruptionOnStartup(boolean checkForCorruptionOnStartup)
this.checkForCorruptionOnStartup = checkForCorruptionOnStartup;
}

public void setDataFileFactory(DataFileFactory dataFileFactory) {
this.dataFileFactory = dataFileFactory;
}

public DataFileFactory getDataFileFactory() {
return this.dataFileFactory;
}

public void setWriteBatchSize(int writeBatchSize) {
this.writeBatchSize = writeBatchSize;
}
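
The same hook is available directly on the Journal; DefaultDataFileFactory preserves the previous new DataFile(file, num) behaviour, so existing configurations are unchanged. A hypothetical direct use of the accessors added above:

    Journal journal = new Journal();
    journal.setDataFileFactory(new DefaultDataFileFactory()); // default; a test can plug in a failing factory

The test-only factory in the next file overrides DataFile.move() to simulate exactly such a failure.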
@@ -0,0 +1,47 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.store.kahadb.disk.journal;

import org.apache.activemq.util.IOHelper;

import java.io.File;
import java.io.IOException;

import static org.apache.activemq.store.kahadb.disk.journal.Journal.DEFAULT_ARCHIVE_DIRECTORY;

public class TestDataFileFactory implements DataFileFactory {
@Override
public DataFile create(File file, int number) {
return new TestDataFile(file, number);
}

public class TestDataFile extends DataFile {

TestDataFile(File file, int number) {
super(file, number);
}

@Override
public synchronized void move(File targetDirectory) throws IOException {
if (targetDirectory.getName().contains(DEFAULT_ARCHIVE_DIRECTORY) && this.dataFileId > 4) {
throw new SecurityException("No Perms to write to archive times:" + this.dataFileId);
}
IOHelper.moveFile(file, targetDirectory);
}
}
}
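
In JournalArchiveTest below, TestDataFileFactory replaces the old SecurityManager hook: instead of vetoing writes to the archive directory from checkWrite(), the overridden move() throws a SecurityException for data files with an id greater than 4, reproducing the archive failure the test needs without touching the JDK security machinery.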

@@ -22,6 +22,9 @@
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.store.kahadb.disk.journal.DataFile;
import org.apache.activemq.store.kahadb.disk.journal.DataFileFactory;
import org.apache.activemq.store.kahadb.disk.journal.DefaultDataFileFactory;
import org.apache.activemq.store.kahadb.disk.journal.TestDataFileFactory;
import org.junit.After;
import org.junit.Test;
import org.slf4j.Logger;
@@ -55,6 +58,8 @@ public class JournalArchiveTest {
private final Destination destination = new ActiveMQQueue("Test");
private KahaDBPersistenceAdapter adapter;

private DataFileFactory dataFileFactory;

protected void startBroker() throws Exception {
doStartBroker(true);
}
@@ -104,6 +109,7 @@ protected void configurePersistence(BrokerService brokerService) throws Exception {
adapter.setCheckForCorruptJournalFiles(true);

adapter.setArchiveDataLogs(true);
adapter.setDataFileFactory(dataFileFactory);
}

@After
@@ -119,16 +125,7 @@ public void tearDown() throws Exception {
public void testRecoveryOnArchiveFailure() throws Exception {
final AtomicInteger atomicInteger = new AtomicInteger();

System.setSecurityManager(new SecurityManager() {
public void checkPermission(Permission perm) {}
public void checkPermission(Permission perm, Object context) {}

public void checkWrite(String file) {
if (file.contains(DEFAULT_ARCHIVE_DIRECTORY) && atomicInteger.incrementAndGet() > 4) {
throw new SecurityException("No Perms to write to archive times:" + atomicInteger.get());
}
}
});
this.dataFileFactory = new TestDataFileFactory();
startBroker();

int sent = produceMessagesToConsumeMultipleDataFiles(50);
@@ -150,8 +147,7 @@ public void run() {
assertEquals("all message received", sent, received);
assertTrue("broker got shutdown on page in error", gotShutdown.await(10, TimeUnit.SECONDS));

// no restrictions
System.setSecurityManager(null);
this.dataFileFactory = new DefaultDataFileFactory();

int numFilesAfterRestart = 0;
try {