HDDS-12002. Move up out() and err() to AbstractSubcommand (#7687)
chiacyu authored Jan 13, 2025
1 parent c387656 commit 2516ea6
Showing 10 changed files with 57 additions and 115 deletions.
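For orientation, the following sketch (not part of this commit; the class and command names are hypothetical) shows the pattern the refactoring enables: a subcommand no longer declares its own @CommandLine.Spec field or private static out()/err() helpers, it simply extends AbstractSubcommand and uses the protected accessors added below.

    import java.util.concurrent.Callable;

    import org.apache.hadoop.hdds.cli.AbstractSubcommand;
    import picocli.CommandLine;

    // Hypothetical subcommand illustrating the pattern: no local @CommandLine.Spec
    // field and no static out()/err() helpers; the protected accessors are inherited
    // from AbstractSubcommand, which resolves them via spec().commandLine().
    @CommandLine.Command(name = "example", description = "Illustrative subcommand")
    public class ExampleSubcommand extends AbstractSubcommand implements Callable<Void> {

      @Override
      public Void call() {
        out().println("regular output goes to picocli's configured out writer");
        err().println("diagnostics go to picocli's configured err writer");
        return null;
      }
    }

Because out() and err() go through picocli's CommandLine rather than System.out/System.err, tests can capture command output simply by swapping the writers, as the TestOzoneTenantShell changes below do.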
----------------------------------------------------------------------
@@ -21,6 +21,7 @@
import org.apache.ratis.util.MemoizedSupplier;
import picocli.CommandLine;

import java.io.PrintWriter;
import java.util.function.Supplier;

/** Base functionality for all Ozone subcommands. */
@@ -77,4 +78,12 @@ public OzoneConfiguration getOzoneConf() {
return conf;
}
}

protected PrintWriter out() {
return spec().commandLine().getOut();
}

protected PrintWriter err() {
return spec().commandLine().getErr();
}
}
----------------------------------------------------------------------
@@ -20,6 +20,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hdds.cli.AbstractSubcommand;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -40,7 +41,6 @@

import java.io.File;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.List;
@@ -56,14 +56,11 @@
"for this datanode.",
mixinStandardHelpOptions = true,
versionProvider = HddsVersionProvider.class)
public class UpgradeSubcommand implements Callable<Void> {
public class UpgradeSubcommand extends AbstractSubcommand implements Callable<Void> {

private static final Logger LOG =
LoggerFactory.getLogger(UpgradeSubcommand.class);

@CommandLine.Spec
private static CommandLine.Model.CommandSpec spec;

@CommandLine.Option(names = {"--volume"},
required = false,
description = "volume path")
@@ -194,12 +191,4 @@ private OzoneConfiguration getConfiguration() {
}
return ozoneConfiguration;
}

private static PrintWriter err() {
return spec.commandLine().getErr();
}

private static PrintWriter out() {
return spec.commandLine().getOut();
}
}
----------------------------------------------------------------------
@@ -42,17 +42,16 @@
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.io.TempDir;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import picocli.CommandLine;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.nio.file.Path;
import java.security.PrivilegedExceptionAction;
@@ -102,10 +101,8 @@ public class TestOzoneTenantShell {
private static OzoneShell ozoneSh = null;
private static TenantShell tenantShell = null;

private final ByteArrayOutputStream out = new ByteArrayOutputStream();
private final ByteArrayOutputStream err = new ByteArrayOutputStream();
private static final PrintStream OLD_OUT = System.out;
private static final PrintStream OLD_ERR = System.err;
private final StringWriter out = new StringWriter();
private final StringWriter err = new StringWriter();

private static String omServiceId;
private static int numOfOMs;
@@ -173,9 +170,10 @@ public static void shutdown() {

@BeforeEach
public void setup() throws UnsupportedEncodingException {
System.setOut(new PrintStream(out, false, UTF_8.name()));
System.setErr(new PrintStream(err, false, UTF_8.name()));

tenantShell.getCmd().setOut(new PrintWriter(out));
tenantShell.getCmd().setErr(new PrintWriter(err));
ozoneSh.getCmd().setOut(new PrintWriter(out));
ozoneSh.getCmd().setErr(new PrintWriter(err));
// Suppress OMNotLeaderException in the log
GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.WARN);
// Enable debug logging for interested classes
@@ -187,27 +185,15 @@ public void setup() throws UnsupportedEncodingException {
GenericTestUtils.setLogLevel(OMRangerBGSyncService.LOG, Level.DEBUG);
}
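A minimal, self-contained sketch of the capture pattern the test switches to (the ExampleCommand here is hypothetical): picocli's out and err writers are pointed at StringWriter buffers, so no System.out/System.err redirection or @AfterEach restore is needed.

    import java.io.PrintWriter;
    import java.io.StringWriter;

    import picocli.CommandLine;

    public final class OutputCaptureSketch {

      @CommandLine.Command(name = "example")
      static class ExampleCommand implements Runnable {
        @CommandLine.Spec
        CommandLine.Model.CommandSpec spec;

        @Override
        public void run() {
          spec.commandLine().getOut().println("normal output");
          spec.commandLine().getErr().println("error output");
        }
      }

      public static void main(String[] args) {
        StringWriter out = new StringWriter();
        StringWriter err = new StringWriter();

        CommandLine cmd = new CommandLine(new ExampleCommand());
        cmd.setOut(new PrintWriter(out, true));  // capture stdout-style output
        cmd.setErr(new PrintWriter(err, true));  // capture stderr-style output
        cmd.execute();

        // A test would assert on the captured text and then clear the buffer,
        // much like checkOutput() below does:
        System.out.println(out.toString().contains("normal output"));  // prints: true
        out.getBuffer().setLength(0);
      }
    }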

@AfterEach
public void reset() {
// reset stream after each unit test
out.reset();
err.reset();

// restore system streams
System.setOut(OLD_OUT);
System.setErr(OLD_ERR);
}

/**
* Returns exit code.
*/
private int execute(GenericCli shell, String[] args) {
LOG.info("Executing shell command with args {}", Arrays.asList(args));
CommandLine cmd = shell.getCmd();

CommandLine.IExecutionExceptionHandler exceptionHandler =
(ex, commandLine, parseResult) -> {
new PrintStream(err, true, DEFAULT_ENCODING).println(ex.getMessage());
commandLine.getErr().println(ex.getMessage());
return commandLine.getCommandSpec().exitCodeOnExecutionException();
};

@@ -310,25 +296,25 @@ private String[] getHASetConfStrings(String[] existingArgs) {
/**
* Helper function that checks command output AND clears it.
*/
private void checkOutput(ByteArrayOutputStream stream, String stringToMatch,
private void checkOutput(StringWriter writer, String stringToMatch,
boolean exactMatch) throws IOException {
stream.flush();
final String str = stream.toString(DEFAULT_ENCODING);
writer.flush();
final String str = writer.toString();
checkOutput(str, stringToMatch, exactMatch);
stream.reset();
writer.getBuffer().setLength(0);
}

private void checkOutput(ByteArrayOutputStream stream, String stringToMatch,
private void checkOutput(StringWriter writer, String stringToMatch,
boolean exactMatch, boolean expectValidJSON) throws IOException {
stream.flush();
final String str = stream.toString(DEFAULT_ENCODING);
writer.flush();
final String str = writer.toString();
if (expectValidJSON) {
// Verify if the String can be parsed as a valid JSON
final ObjectMapper objectMapper = new ObjectMapper();
objectMapper.readTree(str);
}
checkOutput(str, stringToMatch, exactMatch);
stream.reset();
writer.getBuffer().setLength(0);
}

private void checkOutput(String str, String stringToMatch,
----------------------------------------------------------------------
@@ -27,6 +27,7 @@
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hadoop.hdds.cli.AbstractSubcommand;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
@@ -87,14 +88,11 @@
name = "scan",
description = "Parse specified metadataTable"
)
public class DBScanner implements Callable<Void> {
public class DBScanner extends AbstractSubcommand implements Callable<Void> {

public static final Logger LOG = LoggerFactory.getLogger(DBScanner.class);
private static final String SCHEMA_V3 = "V3";

@CommandLine.Spec
private static CommandLine.Model.CommandSpec spec;

@CommandLine.ParentCommand
private RDBParser parent;

@@ -214,14 +212,6 @@ public Void call() throws Exception {
return null;
}

private static PrintWriter err() {
return spec.commandLine().getErr();
}

private static PrintWriter out() {
return spec.commandLine().getOut();
}

public byte[] getValueObject(DBColumnFamilyDefinition dbColumnFamilyDefinition, String key) {
Class<?> keyType = dbColumnFamilyDefinition.getKeyType();
if (keyType.equals(String.class)) {
@@ -525,7 +515,7 @@ private boolean checkFilteredObjectCollection(Collection<?> valueObject, Map<Str
return false;
}

static Field getRequiredFieldFromAllFields(Class clazz, String fieldName) throws NoSuchFieldException {
Field getRequiredFieldFromAllFields(Class clazz, String fieldName) throws NoSuchFieldException {
List<Field> classFieldList = ValueSchema.getAllFields(clazz);
Field classField = null;
for (Field f : classFieldList) {
@@ -680,12 +670,12 @@ public static ObjectWriter getWriter() {
}


private static class Task implements Callable<Void> {
private class Task implements Callable<Void> {

private final DBColumnFamilyDefinition dbColumnFamilyDefinition;
private final ArrayList<ByteArrayKeyValue> batch;
private final LogWriter logWriter;
private static final ObjectWriter WRITER =
private final ObjectWriter writer =
JsonSerializationHelper.getWriter();
private final long sequenceId;
private final boolean withKey;
@@ -758,12 +748,12 @@ public Void call() {
}
String cid = key.toString().substring(0, index);
String blockId = key.toString().substring(index);
sb.append(WRITER.writeValueAsString(LongCodec.get()
sb.append(writer.writeValueAsString(LongCodec.get()
.fromPersistedFormat(
FixedLengthStringCodec.string2Bytes(cid)) +
KEY_SEPARATOR_SCHEMA_V3 + blockId));
} else {
sb.append(WRITER.writeValueAsString(key));
sb.append(writer.writeValueAsString(key));
}
sb.append(": ");
}
@@ -774,9 +764,9 @@ public Void call() {
if (valueFields != null) {
Map<String, Object> filteredValue = new HashMap<>();
filteredValue.putAll(getFieldsFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsSplitMap));
sb.append(WRITER.writeValueAsString(filteredValue));
sb.append(writer.writeValueAsString(filteredValue));
} else {
sb.append(WRITER.writeValueAsString(o));
sb.append(writer.writeValueAsString(o));
}

results.add(sb.toString());
----------------------------------------------------------------------
@@ -18,6 +18,7 @@

package org.apache.hadoop.ozone.debug.ldb;

import org.apache.hadoop.hdds.cli.AbstractSubcommand;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.server.JsonUtils;
import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
@@ -29,7 +30,6 @@
import picocli.CommandLine;

import java.io.IOException;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
@@ -51,16 +51,13 @@
name = "value-schema",
description = "Schema of value in metadataTable"
)
public class ValueSchema implements Callable<Void> {
public class ValueSchema extends AbstractSubcommand implements Callable<Void> {

@CommandLine.ParentCommand
private RDBParser parent;

public static final Logger LOG = LoggerFactory.getLogger(ValueSchema.class);

@CommandLine.Spec
private static CommandLine.Model.CommandSpec spec;

@CommandLine.Option(names = {"--column_family", "--column-family", "--cf"},
required = true,
description = "Table name")
@@ -86,7 +83,7 @@ public Void call() throws Exception {

String dbPath = parent.getDbPath();
Map<String, Object> fields = new HashMap<>();
success = getValueFields(dbPath, fields, depth, tableName, dnDBSchemaVersion);
success = getValueFields(dbPath, fields);

out().println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(fields));

@@ -99,8 +96,7 @@ public Void call() throws Exception {
return null;
}

public static boolean getValueFields(String dbPath, Map<String, Object> valueSchema, int d, String table,
String dnDBSchemaVersion) {
public boolean getValueFields(String dbPath, Map<String, Object> valueSchema) {

dbPath = removeTrailingSlashIfNeeded(dbPath);
DBDefinitionFactory.setDnDBSchemaVersion(dnDBSchemaVersion);
@@ -110,14 +106,14 @@ public static boolean getValueFields(String dbPath, Map<String, Object> valueSch
return false;
}
final DBColumnFamilyDefinition<?, ?> columnFamilyDefinition =
dbDefinition.getColumnFamily(table);
dbDefinition.getColumnFamily(tableName);
if (columnFamilyDefinition == null) {
err().print("Error: Table with name '" + table + "' not found");
err().print("Error: Table with name '" + tableName + "' not found");
return false;
}

Class<?> c = columnFamilyDefinition.getValueType();
valueSchema.put(c.getSimpleName(), getFieldsStructure(c, d));
valueSchema.put(c.getSimpleName(), getFieldsStructure(c, depth));

return true;
}
@@ -162,14 +158,6 @@ public static List<Field> getAllFields(Class clazz) {
return result;
}

private static PrintWriter err() {
return spec.commandLine().getErr();
}

private static PrintWriter out() {
return spec.commandLine().getOut();
}

private static String removeTrailingSlashIfNeeded(String dbPath) {
if (dbPath.endsWith(OzoneConsts.OZONE_URI_DELIMITER)) {
dbPath = dbPath.substring(0, dbPath.length() - 1);
----------------------------------------------------------------------
@@ -20,7 +20,6 @@
import org.apache.hadoop.hdds.cli.AbstractSubcommand;
import picocli.CommandLine;

import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;
import java.util.concurrent.Callable;
@@ -74,16 +73,6 @@ protected void error(String msg, Object... args) {
err().println(formatMessage(msg, args));
}

private PrintWriter out() {
return spec().commandLine()
.getOut();
}

private PrintWriter err() {
return spec().commandLine()
.getErr();
}

private String formatMessage(String msg, Object[] args) {
if (args != null && args.length > 0) {
msg = String.format(msg, args);