Skip to content

Commit

Permalink
PARQUET-2419: Reduce noisy logging when running test suite (#1253)
Browse files Browse the repository at this point in the history
  • Loading branch information
amousavigourabi authored Jan 26, 2024
1 parent 8264d8b commit ed308ff
Show file tree
Hide file tree
Showing 14 changed files with 58 additions and 28 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci-hadoop2.yml
Original file line number Diff line number Diff line change
Expand Up @@ -53,4 +53,4 @@ jobs:
run: |
EXTRA_JAVA_TEST_ARGS=$(mvn help:evaluate -Dexpression=extraJavaTestArgs -q -DforceStdout)
export MAVEN_OPTS="$MAVEN_OPTS $EXTRA_JAVA_TEST_ARGS"
mvn verify --batch-mode -P hadoop2 javadoc:javadoc -Pci-test
mvn verify --batch-mode -P hadoop2 javadoc:javadoc
2 changes: 1 addition & 1 deletion .github/workflows/ci-hadoop3.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,4 +54,4 @@ jobs:
run: |
EXTRA_JAVA_TEST_ARGS=$(mvn help:evaluate -Dexpression=extraJavaTestArgs -q -DforceStdout)
export MAVEN_OPTS="$MAVEN_OPTS $EXTRA_JAVA_TEST_ARGS"
mvn verify --batch-mode javadoc:javadoc -Pci-test
mvn verify --batch-mode javadoc:javadoc
2 changes: 1 addition & 1 deletion .github/workflows/vector-plugins.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,4 +54,4 @@ jobs:
run: |
EXTRA_JAVA_TEST_ARGS=$(mvn help:evaluate -Dexpression=extraJavaTestArgs -q -DforceStdout)
export MAVEN_OPTS="$MAVEN_OPTS $EXTRA_JAVA_TEST_ARGS"
mvn verify --batch-mode -Pvector-plugins javadoc:javadoc -Pci-test -pl parquet-plugins/parquet-encoding-vector,parquet-plugins/parquet-plugins-benchmarks -am
mvn verify --batch-mode -Pvector-plugins javadoc:javadoc -pl parquet-plugins/parquet-encoding-vector,parquet-plugins/parquet-plugins-benchmarks -am
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,13 @@
import org.apache.parquet.column.statistics.LongStatistics;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TestMemPageStore {

private static final Logger LOG = LoggerFactory.getLogger(TestMemPageStore.class);

private String[] path = {"foo", "bar"};

@Test
Expand All @@ -48,12 +52,12 @@ public void test() throws IOException {
pageWriter.writePage(BytesInput.from(new byte[735]), 209, stats, BIT_PACKED, BIT_PACKED, PLAIN);
PageReader pageReader = memPageStore.getPageReader(col);
long totalValueCount = pageReader.getTotalValueCount();
System.out.println(totalValueCount);
LOG.info(String.valueOf(totalValueCount));
int total = 0;
do {
DataPage readPage = pageReader.readPage();
total += readPage.getValueCount();
System.out.println(readPage);
LOG.info(readPage.toString());
// TODO: assert
} while (total < totalValueCount);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ public ExpectationValidatingRecordConsumer(Deque<String> expectations) {
}

private void validate(String got) {
// System.out.println(" \"" + got + "\";");
assertEquals("event #" + count, expectations.pop(), got);
++count;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,8 @@
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* This class contains test cases to validate each data type encoding.
Expand All @@ -72,6 +74,9 @@
*/
@RunWith(Parameterized.class)
public class FileEncodingsIT {

private static final Logger LOG = LoggerFactory.getLogger(FileEncodingsIT.class);

private static final int RANDOM_SEED = 1;
private static final int RECORD_COUNT = 2000000;
private static final int FIXED_LENGTH = 60;
Expand Down Expand Up @@ -156,7 +161,7 @@ public void testFileEncodingsWithoutDictionary() throws Exception {
* This loop will make sure to test future writer versions added to WriterVersion enum.
*/
for (WriterVersion writerVersion : WriterVersion.values()) {
System.out.println(String.format(
LOG.info(String.format(
"Testing %s/%s/%s encodings using ROW_GROUP_SIZE=%d PAGE_SIZE=%d",
writerVersion, this.paramTypeName, this.compression, TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE));

Expand All @@ -182,7 +187,7 @@ public void testFileEncodingsWithDictionary() throws Exception {
* This loop will make sure to test future writer versions added to WriterVersion enum.
*/
for (WriterVersion writerVersion : WriterVersion.values()) {
System.out.println(String.format(
LOG.info(String.format(
"Testing %s/%s/%s + DICTIONARY encodings using ROW_GROUP_SIZE=%d PAGE_SIZE=%d",
writerVersion, this.paramTypeName, this.compression, TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,12 +66,17 @@
import org.apache.parquet.schema.MessageTypeParser;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* DeprecatedParquetInputFormat is used by cascading. It initializes the recordReader using an initialize method with
* different parameters than ParquetInputFormat
*/
public class DeprecatedInputFormatTest {

private static final Logger LOG = LoggerFactory.getLogger(DeprecatedInputFormatTest.class);

final Path parquetPath = new Path("target/test/example/TestInputOutputFormat/parquet");
final Path inputPath = new Path("src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java");
final Path outputPath = new Path("target/test/example/TestInputOutputFormat/out");
Expand Down Expand Up @@ -317,10 +322,10 @@ public void testReadWriteWithoutCounter() throws Exception {

private void waitForJob(Job job) throws InterruptedException, IOException {
while (!job.isComplete()) {
System.out.println("waiting for job " + job.getJobName());
LOG.info("waiting for job " + job.getJobName());
sleep(100);
}
System.out.println("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
LOG.info("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
if (!job.isSuccessful()) {
throw new RuntimeException("job failed " + job.getJobName());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,13 @@
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TestDirectCodecFactory {

private static final Logger LOG = LoggerFactory.getLogger(TestDirectCodecFactory.class);

private enum Decompression {
ON_HEAP,
OFF_HEAP,
Expand Down Expand Up @@ -121,7 +125,7 @@ private void test(int size, CompressionCodecName codec, boolean useOnHeapCompres
final String msg = String.format(
"Failure while testing Codec: %s, OnHeapCompressionInput: %s, Decompression Mode: %s, Data Size: %d",
codec.name(), useOnHeapCompression, decomp.name(), size);
System.out.println(msg);
LOG.error(msg);
throw new RuntimeException(msg, e);
} finally {
if (rawBuf != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,8 +71,13 @@
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TestStatistics {

private static final Logger LOG = LoggerFactory.getLogger(TestStatistics.class);

private static final int MEGABYTE = 1 << 20;
private static final long RANDOM_SEED = 1441990701846L; // System.currentTimeMillis();

Expand Down Expand Up @@ -481,7 +486,7 @@ public void testStatistics() throws IOException {
File file = folder.newFile("test_file.parquet");
file.delete();

System.out.println(String.format("RANDOM SEED: %s", RANDOM_SEED));
LOG.info(String.format("RANDOM SEED: %s", RANDOM_SEED));

Random random = new Random(RANDOM_SEED);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,13 @@
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TestParquetLoader {

private static final Logger LOG = LoggerFactory.getLogger(TestParquetLoader.class);

@Test
public void testSchema() throws Exception {
String location = "garbage";
Expand Down Expand Up @@ -243,7 +248,7 @@ public void testTypePersuasion() throws Exception {
+ DataType.findTypeName(types[(i + 4) % types.length]) + "," + " b:"
+ DataType.findTypeName(types[(i + 5) % types.length]) + "');";

System.out.println("Query: " + query);
LOG.info("Query: " + query);
pigServer.registerQuery(query);
pigServer.registerQuery("STORE B into 'out" + i + "' using mock.Storage();");
pigServer.executeBatch();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,13 @@
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TestSummary {

private static final Logger LOG = LoggerFactory.getLogger(TestSummary.class);

private static final TupleFactory tf = TupleFactory.getInstance();
private static final BagFactory bf = BagFactory.getInstance();

Expand Down Expand Up @@ -103,9 +107,8 @@ public void testAlgebraic() throws IOException {

private void validate(String result, int factor) throws IOException {
TupleSummaryData s = SummaryData.fromJSON(result, TupleSummaryData.class);
// System.out.println(SummaryData.toPrettyJSON(s));
assertEquals(9 * factor, s.getCount());
assertEquals(1 * factor, s.getFields().get(0).getNull().longValue());
assertEquals(factor, s.getFields().get(0).getNull().longValue());
assertEquals(7 * factor, s.getFields().get(0).getBag().getCount());
assertEquals(
18 * factor,
Expand Down Expand Up @@ -141,10 +144,10 @@ public void testPigScript() throws Exception {
pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();");
pigServer.registerQuery("B = FOREACH (GROUP A ALL) GENERATE " + Summary.class.getName() + "(A);");
pigServer.registerQuery("STORE B INTO 'out' USING mock.Storage();");
System.out.println(data.get("out").get(0).get(0));
LOG.info(String.valueOf(data.get("out").get(0).get(0)));
TupleSummaryData s =
SummaryData.fromJSON((String) data.get("out").get(0).get(0), TupleSummaryData.class);
System.out.println(s);
LOG.info(String.valueOf(s));
}

@Test
Expand All @@ -163,7 +166,7 @@ public void testMaxIsZero() throws Exception {
pigServer.registerQuery("STORE B INTO 'out' USING mock.Storage();");
TupleSummaryData s =
SummaryData.fromJSON((String) data.get("out").get(0).get(0), TupleSummaryData.class);
System.out.println(s);
LOG.info(String.valueOf(s));
assertEquals(0, s.getFields().get(1).getNumber().getValue().getMax(), 0);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@
import org.slf4j.LoggerFactory;

public class TestInputOutputFormat {

private static final Logger LOG = LoggerFactory.getLogger(TestInputOutputFormat.class);

public static AddressBook nextAddressbook(int i) {
Expand Down Expand Up @@ -244,11 +245,11 @@ public void testSchemaEvolution() throws Exception {

private void read(String outputPath, int expected) throws FileNotFoundException, IOException {
final BufferedReader out = new BufferedReader(new FileReader(new File(outputPath.toString())));
String lineOut = null;
String lineOut;
int lineNumber = 0;
while ((lineOut = out.readLine()) != null) {
lineOut = lineOut.substring(lineOut.indexOf("\t") + 1);
System.out.println(lineOut);
LOG.info(lineOut);
++lineNumber;
}
out.close();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@
import thrift.test.OneOfEach;

public class TestParquetWriteProtocol {

private static final Logger LOG = LoggerFactory.getLogger(TestParquetWriteProtocol.class);

@Test
Expand Down Expand Up @@ -697,7 +698,6 @@ private void validateThrift(String[] expectations, TBase<?, ?> a) throws TExcept

private void validateThrift(Configuration configuration, String[] expectations, TBase<?, ?> a) throws TException {
final ThriftSchemaConverter thriftSchemaConverter = new ThriftSchemaConverter(configuration);
// System.out.println(a);
final Class<TBase<?, ?>> class1 = (Class<TBase<?, ?>>) a.getClass();
final MessageType schema = thriftSchemaConverter.convert(class1);
LOG.info("{}", schema);
Expand Down
13 changes: 6 additions & 7 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -107,8 +107,8 @@
<commons-lang3.version>3.14.0</commons-lang3.version>

<!-- properties for the profiles -->
<surefire.argLine> </surefire.argLine>
<surefire.logLevel>INFO</surefire.logLevel>
<surefire.argLine>-XX:MaxJavaStackTraceDepth=8</surefire.argLine>
<surefire.logLevel>ERROR</surefire.logLevel>

<!-- Resource intensive tests are enabled by default but disabled in the CI environment -->
<enableResourceIntensiveTests>true</enableResourceIntensiveTests>
Expand Down Expand Up @@ -713,13 +713,12 @@
</build>
</profile>

<!-- Profile for CI tests to have less output -->
<!-- Profile for tests to have more output -->
<profile>
<id>ci-test</id>
<id>verbose-test</id>
<properties>
<surefire.logLevel>WARN</surefire.logLevel>
<surefire.argLine>-XX:MaxJavaStackTraceDepth=10</surefire.argLine>
<enableResourceIntensiveTests>false</enableResourceIntensiveTests>
<surefire.logLevel>INFO</surefire.logLevel>
<surefire.argLine>-XX:MaxJavaStackTraceDepth=1024</surefire.argLine>
</properties>
</profile>

Expand Down

0 comments on commit ed308ff

Please sign in to comment.