diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java index 550a68ae07e..00d907c5ce5 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/AbstractSubcommand.java @@ -21,6 +21,7 @@ import org.apache.ratis.util.MemoizedSupplier; import picocli.CommandLine; +import java.io.PrintWriter; import java.util.function.Supplier; /** Base functionality for all Ozone subcommands. */ @@ -77,4 +78,12 @@ public OzoneConfiguration getOzoneConf() { return conf; } } + + protected PrintWriter out() { + return spec().commandLine().getOut(); + } + + protected PrintWriter err() { + return spec().commandLine().getErr(); + } } diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java index a94f631b5bc..3aeb7813a09 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/UpgradeSubcommand.java @@ -20,6 +20,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import org.apache.commons.lang3.tuple.Pair; +import org.apache.hadoop.hdds.cli.AbstractSubcommand; import org.apache.hadoop.hdds.cli.HddsVersionProvider; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.DatanodeDetails; @@ -40,7 +41,6 @@ import java.io.File; import java.io.InputStreamReader; -import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.List; @@ -56,14 +56,11 @@ "for this datanode.", mixinStandardHelpOptions = true, versionProvider = HddsVersionProvider.class) -public class 
UpgradeSubcommand implements Callable { +public class UpgradeSubcommand extends AbstractSubcommand implements Callable { private static final Logger LOG = LoggerFactory.getLogger(UpgradeSubcommand.class); - @CommandLine.Spec - private static CommandLine.Model.CommandSpec spec; - @CommandLine.Option(names = {"--volume"}, required = false, description = "volume path") @@ -194,12 +191,4 @@ private OzoneConfiguration getConfiguration() { } return ozoneConfiguration; } - - private static PrintWriter err() { - return spec.commandLine().getErr(); - } - - private static PrintWriter out() { - return spec.commandLine().getOut(); - } } diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java index 09770b097f8..409d69e9980 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneTenantShell.java @@ -42,17 +42,16 @@ import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.io.TempDir; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.event.Level; import picocli.CommandLine; -import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; -import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.nio.file.Path; import java.security.PrivilegedExceptionAction; @@ -102,10 +101,8 @@ public class TestOzoneTenantShell { private static OzoneShell ozoneSh = null; private static TenantShell tenantShell = null; - private final ByteArrayOutputStream out = new ByteArrayOutputStream(); - private final ByteArrayOutputStream err = new 
ByteArrayOutputStream(); - private static final PrintStream OLD_OUT = System.out; - private static final PrintStream OLD_ERR = System.err; + private final StringWriter out = new StringWriter(); + private final StringWriter err = new StringWriter(); private static String omServiceId; private static int numOfOMs; @@ -173,9 +170,10 @@ public static void shutdown() { @BeforeEach public void setup() throws UnsupportedEncodingException { - System.setOut(new PrintStream(out, false, UTF_8.name())); - System.setErr(new PrintStream(err, false, UTF_8.name())); - + tenantShell.getCmd().setOut(new PrintWriter(out)); + tenantShell.getCmd().setErr(new PrintWriter(err)); + ozoneSh.getCmd().setOut(new PrintWriter(out)); + ozoneSh.getCmd().setErr(new PrintWriter(err)); // Suppress OMNotLeaderException in the log GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.WARN); // Enable debug logging for interested classes @@ -187,27 +185,15 @@ public void setup() throws UnsupportedEncodingException { GenericTestUtils.setLogLevel(OMRangerBGSyncService.LOG, Level.DEBUG); } - @AfterEach - public void reset() { - // reset stream after each unit test - out.reset(); - err.reset(); - - // restore system streams - System.setOut(OLD_OUT); - System.setErr(OLD_ERR); - } - /** * Returns exit code. */ private int execute(GenericCli shell, String[] args) { LOG.info("Executing shell command with args {}", Arrays.asList(args)); CommandLine cmd = shell.getCmd(); - CommandLine.IExecutionExceptionHandler exceptionHandler = (ex, commandLine, parseResult) -> { - new PrintStream(err, true, DEFAULT_ENCODING).println(ex.getMessage()); + commandLine.getErr().println(ex.getMessage()); return commandLine.getCommandSpec().exitCodeOnExecutionException(); }; @@ -310,25 +296,25 @@ private String[] getHASetConfStrings(String[] existingArgs) { /** * Helper function that checks command output AND clears it. 
*/ - private void checkOutput(ByteArrayOutputStream stream, String stringToMatch, + private void checkOutput(StringWriter writer, String stringToMatch, boolean exactMatch) throws IOException { - stream.flush(); - final String str = stream.toString(DEFAULT_ENCODING); + writer.flush(); + final String str = writer.toString(); checkOutput(str, stringToMatch, exactMatch); - stream.reset(); + writer.getBuffer().setLength(0); } - private void checkOutput(ByteArrayOutputStream stream, String stringToMatch, + private void checkOutput(StringWriter writer, String stringToMatch, boolean exactMatch, boolean expectValidJSON) throws IOException { - stream.flush(); - final String str = stream.toString(DEFAULT_ENCODING); + writer.flush(); + final String str = writer.toString(); if (expectValidJSON) { // Verify if the String can be parsed as a valid JSON final ObjectMapper objectMapper = new ObjectMapper(); objectMapper.readTree(str); } checkOutput(str, stringToMatch, exactMatch); - stream.reset(); + writer.getBuffer().setLength(0); } private void checkOutput(String str, String stringToMatch, diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java index 6fbbd1a3083..cb432ab45ab 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/DBScanner.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hdds.cli.AbstractSubcommand; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; @@ -87,14 +88,11 @@ name = "scan", description = "Parse specified metadataTable" ) -public 
class DBScanner implements Callable { +public class DBScanner extends AbstractSubcommand implements Callable { public static final Logger LOG = LoggerFactory.getLogger(DBScanner.class); private static final String SCHEMA_V3 = "V3"; - @CommandLine.Spec - private static CommandLine.Model.CommandSpec spec; - @CommandLine.ParentCommand private RDBParser parent; @@ -214,14 +212,6 @@ public Void call() throws Exception { return null; } - private static PrintWriter err() { - return spec.commandLine().getErr(); - } - - private static PrintWriter out() { - return spec.commandLine().getOut(); - } - public byte[] getValueObject(DBColumnFamilyDefinition dbColumnFamilyDefinition, String key) { Class keyType = dbColumnFamilyDefinition.getKeyType(); if (keyType.equals(String.class)) { @@ -525,7 +515,7 @@ private boolean checkFilteredObjectCollection(Collection valueObject, Map classFieldList = ValueSchema.getAllFields(clazz); Field classField = null; for (Field f : classFieldList) { @@ -680,12 +670,12 @@ public static ObjectWriter getWriter() { } - private static class Task implements Callable { + private class Task implements Callable { private final DBColumnFamilyDefinition dbColumnFamilyDefinition; private final ArrayList batch; private final LogWriter logWriter; - private static final ObjectWriter WRITER = + private final ObjectWriter writer = JsonSerializationHelper.getWriter(); private final long sequenceId; private final boolean withKey; @@ -758,12 +748,12 @@ public Void call() { } String cid = key.toString().substring(0, index); String blockId = key.toString().substring(index); - sb.append(WRITER.writeValueAsString(LongCodec.get() + sb.append(writer.writeValueAsString(LongCodec.get() .fromPersistedFormat( FixedLengthStringCodec.string2Bytes(cid)) + KEY_SEPARATOR_SCHEMA_V3 + blockId)); } else { - sb.append(WRITER.writeValueAsString(key)); + sb.append(writer.writeValueAsString(key)); } sb.append(": "); } @@ -774,9 +764,9 @@ public Void call() { if (valueFields != null) { 
Map filteredValue = new HashMap<>(); filteredValue.putAll(getFieldsFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsSplitMap)); - sb.append(WRITER.writeValueAsString(filteredValue)); + sb.append(writer.writeValueAsString(filteredValue)); } else { - sb.append(WRITER.writeValueAsString(o)); + sb.append(writer.writeValueAsString(o)); } results.add(sb.toString()); diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java index 4b8eb3b3208..0c2fb302be9 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/ldb/ValueSchema.java @@ -18,6 +18,7 @@ package org.apache.hadoop.ozone.debug.ldb; +import org.apache.hadoop.hdds.cli.AbstractSubcommand; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.server.JsonUtils; import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition; @@ -29,7 +30,6 @@ import picocli.CommandLine; import java.io.IOException; -import java.io.PrintWriter; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; @@ -51,16 +51,13 @@ name = "value-schema", description = "Schema of value in metadataTable" ) -public class ValueSchema implements Callable { +public class ValueSchema extends AbstractSubcommand implements Callable { @CommandLine.ParentCommand private RDBParser parent; public static final Logger LOG = LoggerFactory.getLogger(ValueSchema.class); - @CommandLine.Spec - private static CommandLine.Model.CommandSpec spec; - @CommandLine.Option(names = {"--column_family", "--column-family", "--cf"}, required = true, description = "Table name") @@ -86,7 +83,7 @@ public Void call() throws Exception { String dbPath = parent.getDbPath(); Map fields = new HashMap<>(); - success = getValueFields(dbPath, fields, depth, 
tableName, dnDBSchemaVersion); + success = getValueFields(dbPath, fields); out().println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(fields)); @@ -99,8 +96,7 @@ public Void call() throws Exception { return null; } - public static boolean getValueFields(String dbPath, Map valueSchema, int d, String table, - String dnDBSchemaVersion) { + public boolean getValueFields(String dbPath, Map valueSchema) { dbPath = removeTrailingSlashIfNeeded(dbPath); DBDefinitionFactory.setDnDBSchemaVersion(dnDBSchemaVersion); @@ -110,14 +106,14 @@ public static boolean getValueFields(String dbPath, Map valueSch return false; } final DBColumnFamilyDefinition columnFamilyDefinition = - dbDefinition.getColumnFamily(table); + dbDefinition.getColumnFamily(tableName); if (columnFamilyDefinition == null) { - err().print("Error: Table with name '" + table + "' not found"); + err().print("Error: Table with name '" + tableName + "' not found"); return false; } Class c = columnFamilyDefinition.getValueType(); - valueSchema.put(c.getSimpleName(), getFieldsStructure(c, d)); + valueSchema.put(c.getSimpleName(), getFieldsStructure(c, depth)); return true; } @@ -162,14 +158,6 @@ public static List getAllFields(Class clazz) { return result; } - private static PrintWriter err() { - return spec.commandLine().getErr(); - } - - private static PrintWriter out() { - return spec.commandLine().getOut(); - } - private static String removeTrailingSlashIfNeeded(String dbPath) { if (dbPath.endsWith(OzoneConsts.OZONE_URI_DELIMITER)) { dbPath = dbPath.substring(0, dbPath.length() - 1); diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java index a64cacb8b21..d873d07645d 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/RepairTool.java @@ -20,7 +20,6 @@ import 
org.apache.hadoop.hdds.cli.AbstractSubcommand;
 import picocli.CommandLine;
 
-import java.io.PrintWriter;
 import java.nio.charset.StandardCharsets;
 import java.util.Scanner;
 import java.util.concurrent.Callable;
@@ -74,16 +73,6 @@ protected void error(String msg, Object... args) {
     err().println(formatMessage(msg, args));
   }
 
-  private PrintWriter out() {
-    return spec().commandLine()
-        .getOut();
-  }
-
-  private PrintWriter err() {
-    return spec().commandLine()
-        .getErr();
-  }
-
   private String formatMessage(String msg, Object[] args) {
     if (args != null && args.length > 0) {
       msg = String.format(msg, args);
     }
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java
index db7294e2795..36eada9b4f9 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/Handler.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.ozone.shell;
 
 import java.io.IOException;
-import java.io.PrintStream;
 import java.util.Iterator;
 import java.util.concurrent.Callable;
 
+import org.apache.hadoop.hdds.cli.AbstractSubcommand;
@@ -46,7 +46,7 @@
 @CommandLine.Command(mixinStandardHelpOptions = true,
     versionProvider = HddsVersionProvider.class)
-public abstract class Handler implements Callable<Void> {
+public abstract class Handler extends AbstractSubcommand implements Callable<Void> {
 
   protected static final Logger LOG = LoggerFactory.getLogger(Handler.class);
@@ -123,12 +123,4 @@ protected OzoneConfiguration getConf() {
     return conf;
   }
 
-  protected PrintStream out() {
-    return System.out;
-  }
-
-  protected PrintStream err() {
-    return System.err;
-  }
-
 }
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java
index ae5b5ad566e..0129737e0ea 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/OzoneAddress.java @@ -18,7 +18,7 @@ package org.apache.hadoop.ozone.shell; import java.io.IOException; -import java.io.PrintStream; +import java.io.PrintWriter; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; @@ -452,7 +452,7 @@ private OzoneObj.ResourceType getResourceType() { return null; } - public void print(PrintStream out) { + public void print(PrintWriter out) { if (!volumeName.isEmpty()) { out.printf("Volume Name : %s%n", volumeName); } diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java index aa1675d28eb..813c13a1cfe 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/acl/AclOption.java @@ -24,7 +24,7 @@ import picocli.CommandLine; import java.io.IOException; -import java.io.PrintStream; +import java.io.PrintWriter; import java.util.List; /** @@ -52,7 +52,7 @@ private List getAclList() { return ImmutableList.copyOf(values); } - public void addTo(OzoneObj obj, ObjectStore objectStore, PrintStream out) + public void addTo(OzoneObj obj, ObjectStore objectStore, PrintWriter out) throws IOException { for (OzoneAcl acl : getAclList()) { boolean result = objectStore.addAcl(obj, acl); @@ -65,7 +65,7 @@ public void addTo(OzoneObj obj, ObjectStore objectStore, PrintStream out) } } - public void removeFrom(OzoneObj obj, ObjectStore objectStore, PrintStream out) + public void removeFrom(OzoneObj obj, ObjectStore objectStore, PrintWriter out) throws IOException { for (OzoneAcl acl : getAclList()) { boolean result = objectStore.removeAcl(obj, acl); @@ -78,7 +78,7 @@ public void removeFrom(OzoneObj obj, ObjectStore objectStore, PrintStream out) } } - public void setOn(OzoneObj obj, ObjectStore objectStore, PrintStream out) + public 
void setOn(OzoneObj obj, ObjectStore objectStore, PrintWriter out)
       throws IOException {
     objectStore.setAcl(obj, getAclList());
     out.println("ACLs set successfully.");
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java
index ebbb9509c94..e11c07dcf3b 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/shell/snapshot/SnapshotDiffHandler.java
@@ -35,7 +35,7 @@
 import picocli.CommandLine;
 
 import java.io.IOException;
-import java.io.PrintStream;
+import java.io.PrintWriter;
 
 import static org.apache.hadoop.hdds.server.JsonUtils.toJsonStringWithDefaultPrettyPrinter;
 
@@ -117,19 +117,17 @@ private void getSnapshotDiff(ObjectStore store, String volumeName,
       String bucketName) throws IOException {
     SnapshotDiffResponse diffResponse = store.snapshotDiff(volumeName, bucketName,
         fromSnapshot, toSnapshot, token, pageSize, forceFullDiff, diffDisableNativeLibs);
-    try (PrintStream stream = out()) {
-      if (json) {
-        stream.println(toJsonStringWithDefaultPrettyPrinter(getJsonObject(diffResponse)));
-      } else {
-        stream.println(diffResponse);
-      }
+    // out() returns picocli's shared writer; it must not be closed here,
+    // otherwise all subsequent command output is silently dropped.
+    PrintWriter writer = out();
+    if (json) {
+      writer.println(toJsonStringWithDefaultPrettyPrinter(getJsonObject(diffResponse)));
+    } else {
+      writer.println(diffResponse);
     }
   }
 
   private void cancelSnapshotDiff(ObjectStore store, String volumeName,
       String bucketName) throws IOException {
-    try (PrintStream stream = out()) {
-      stream.println(store.cancelSnapshotDiff(volumeName, bucketName, fromSnapshot, toSnapshot));
-    }
+    out().println(store.cancelSnapshotDiff(volumeName, bucketName, fromSnapshot, toSnapshot));
   }