Skip to content

Commit

Permalink
Update forbiddenapis to 3.5.1 and move to parent pom.xml (#16575)
Browse files Browse the repository at this point in the history
* Update forbiddenapis to 3.5.1 and move to parent pom.xml

* Fix forbiddenapis errors

* Manage awaitility dependency in parent pom.xml

* Remove unused signature files

* Manage hamcrest version and update awaitility to 4.2.0

* Extend block list of shaded utils

* Fix wrong util usage

* Allow net.fortuna.ical4j.util.Strings again

We currently don't have a good replacement.

* Be less strict with org.apache.logging.log4j.util

* Add missing charset

* Add toUpperCase and toLowerCase methods to our StringUtils class

* Use StringUtils helper for upper/lower case conversion

---------

Co-authored-by: Othello Maurer <[email protected]>
  • Loading branch information
bernd and thll authored Sep 21, 2023
1 parent 659aed2 commit 03d7c5f
Show file tree
Hide file tree
Showing 29 changed files with 324 additions and 245 deletions.
1 change: 0 additions & 1 deletion config/forbidden-apis/netty3.txt

This file was deleted.

100 changes: 0 additions & 100 deletions config/forbidden-apis/signatures.txt

This file was deleted.

5 changes: 5 additions & 0 deletions full-backend-tests/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,11 @@
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
*/
package org.graylog.plugins.views;

import au.com.bytecode.opencsv.CSVParser;
import io.restassured.http.Header;
import io.restassured.path.json.JsonPath;
import io.restassured.response.ValidatableResponse;
Expand All @@ -36,11 +37,12 @@
import org.joda.time.DateTime;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.shadow.com.univocity.parsers.csv.Csv;
import org.junit.jupiter.params.shadow.com.univocity.parsers.csv.CsvParser;

import javax.ws.rs.core.MediaType;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
Expand Down Expand Up @@ -101,13 +103,13 @@ public void beforeAll() {
""");

api.search().waitForMessagesCount(3);
api.fieldTypes().waitForFieldTypeDefinitions( "source", "facility", "level");
api.fieldTypes().waitForFieldTypeDefinitions("source", "facility", "level");
}

@ContainerMatrixTest
void testAggregationByStream() {
final ValidatableResponse validatableResponse =
api.post("/search/aggregate","""
api.post("/search/aggregate", """
{
"group_by": [
{
Expand All @@ -133,7 +135,7 @@ void testAggregationByStream() {
@ContainerMatrixTest
void testStdDevSorting() {
final GraylogApiResponse responseDesc =
new GraylogApiResponse(api.post("/search/aggregate","""
new GraylogApiResponse(api.post("/search/aggregate", """
{
"group_by": [
{
Expand All @@ -159,7 +161,7 @@ void testStdDevSorting() {
.containsExactly(0.5, 0.0);

final GraylogApiResponse responseAsc =
new GraylogApiResponse(api.post("/search/aggregate","""
new GraylogApiResponse(api.post("/search/aggregate", """
{
"group_by": [
{
Expand All @@ -185,7 +187,7 @@ void testStdDevSorting() {
@ContainerMatrixTest
void testAggregationByStreamTitle() {
final ValidatableResponse validatableResponse =
api.post("/search/aggregate","""
api.post("/search/aggregate", """
{
"group_by": [
{
Expand Down Expand Up @@ -360,7 +362,7 @@ void testGetRequestAcii() {
}

@ContainerMatrixTest
void testCsvRender() {
void testCsvRender() throws Exception {
final InputStream response = given()
.spec(api.requestSpecification())
.header(new Header("Accept", MoreMediaTypes.TEXT_CSV))
Expand All @@ -386,10 +388,7 @@ void testCsvRender() {
.statusCode(200)
.extract().body().asInputStream();

final CsvParser csvParser = new CsvParser(Csv.parseRfc4180());
final List<String[]> lines = csvParser.parseAll(response);


final List<String[]> lines = parseCsvLines(response);

// headers
Assertions.assertArrayEquals(lines.get(0), new String[]{"grouping: facility", "metric: count(facility)"});
Expand All @@ -399,8 +398,21 @@ void testCsvRender() {
Assertions.assertArrayEquals(lines.get(2), new String[]{"test", "1"});
}

/**
 * Reads the given input stream as UTF-8 text and parses every line as a CSV record.
 *
 * @param inputStream raw CSV content (e.g. an HTTP response body); consumed fully and closed via the wrapping reader
 * @return one String[] of fields per input line, in input order
 * @throws Exception if reading the stream or parsing a line fails (CSVParser.parseLine declares IOException)
 */
private List<String[]> parseCsvLines(InputStream inputStream) throws Exception {
    final CSVParser csvParser = new CSVParser(',', '"');
    final List<String[]> lines = new ArrayList<>();

    try (final var reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
        // Loop until readLine() signals EOF with null. The previous ready()-based loop was
        // incorrect: BufferedReader.ready() only reports whether the next read is guaranteed
        // not to block; it may return false while data is still pending (e.g. on a streaming
        // network response), which would silently truncate the parsed CSV.
        String line;
        while ((line = reader.readLine()) != null) {
            lines.add(csvParser.parseLine(line));
        }
    }

    return lines;
}

@ContainerMatrixTest
void testGetRequestCsv() {
void testGetRequestCsv() throws Exception {

final InputStream response = given()
.spec(api.requestSpecification())
Expand All @@ -413,8 +425,7 @@ void testGetRequestCsv() {
.extract().body().asInputStream();


final CsvParser csvParser = new CsvParser(Csv.parseRfc4180());
final List<String[]> lines = csvParser.parseAll(response);
final List<String[]> lines = parseCsvLines(response);

// headers
Assertions.assertArrayEquals(lines.get(0), new String[]{"grouping: facility", "metric: count(facility)"});
Expand Down
48 changes: 0 additions & 48 deletions graylog-project-parent/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -642,12 +642,6 @@
<version>${jukito.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>${awaitility.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>nl.jqno.equalsverifier</groupId>
<artifactId>equalsverifier</artifactId>
Expand Down Expand Up @@ -752,48 +746,6 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>de.thetaphi</groupId>
<artifactId>forbiddenapis</artifactId>
<configuration>
<suppressAnnotations>
<suppressAnnotation>org.graylog2.shared.SuppressForbidden</suppressAnnotation>
</suppressAnnotations>
<!-- if the used Java version is too new, don't fail, just do nothing: -->
<failOnUnsupportedJava>false</failOnUnsupportedJava>
<failOnViolation>true</failOnViolation>
<bundledSignatures>
<!-- This will automatically choose the right signatures based on 'maven.compiler.target': -->
<bundledSignature>jdk-unsafe</bundledSignature>
<bundledSignature>jdk-deprecated</bundledSignature>
<bundledSignature>jdk-reflection</bundledSignature>
<!-- disallow undocumented classes like sun.misc.Unsafe: -->
<bundledSignature>jdk-non-portable</bundledSignature>
<!-- Workaround until signatures for ${commons-io.version} are released: -->
<bundledSignature>commons-io-unsafe-2.11.0</bundledSignature>
</bundledSignatures>
<signaturesFiles>
<signaturesFile>${project.basedir}/../config/forbidden-apis/netty3.txt</signaturesFile>
<signaturesFile>${project.basedir}/../config/forbidden-apis/signatures.txt</signaturesFile>
</signaturesFiles>
</configuration>
<executions>
<execution>
<id>forbidden-apis-src</id>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
<execution>
<id>forbidden-apis-test</id>
<phase>test-compile</phase>
<goals>
<goal>testCheck</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ public Map<DateTime, Map<String, Long>> fieldHistogram(String fieldName, Set<Str

for (ParsedDateHistogram.ParsedBucket bucket : histogramBuckets) {
final ZonedDateTime zonedDateTime = (ZonedDateTime) bucket.getKey();
final DateTime date = new DateTime(zonedDateTime.toInstant().toEpochMilli()).toDateTime(DateTimeZone.UTC);
final DateTime date = new DateTime(zonedDateTime.toInstant().toEpochMilli(), DateTimeZone.UTC);

final Terms sourceFieldAgg = bucket.getAggregations().get(AGG_MESSAGE_FIELD);
final List<? extends Terms.Bucket> termBuckets = sourceFieldAgg.getBuckets();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import com.github.joschi.jadconfig.util.Duration;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.logging.log4j.util.Strings;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
Expand Down Expand Up @@ -392,7 +391,7 @@ public IndicesBlockStatus getIndicesBlocksStatus(final List<String> indices) {
final GetSettingsRequest getSettingsRequest = new GetSettingsRequest()
.indices(indices.toArray(new String[]{}))
.indicesOptions(IndicesOptions.fromOptions(false, true, true, true))
.names(Strings.EMPTY_ARRAY);
.names(new String[]{});

return client.execute((c, requestOptions) -> {
final GetSettingsResponse settingsResponse = c.indices().getSettings(getSettingsRequest, requestOptions);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import org.graylog.shaded.elasticsearch7.org.elasticsearch.client.Request;
import org.graylog2.indexer.cluster.NodeAdapter;
import org.graylog2.shared.utilities.StringUtils;
import org.graylog2.storage.SearchVersion;

import javax.inject.Inject;
import java.util.Locale;
import java.util.Optional;

public class NodeAdapterES7 implements NodeAdapter {
Expand All @@ -47,7 +47,7 @@ public Optional<SearchVersion> version() {
final Optional<String> version = resp.map(r -> r.path("version")).map(r -> r.path("number")).map(JsonNode::textValue);

final SearchVersion.Distribution distribution = resp.map(r -> r.path("version")).map(r -> r.path("distribution")).map(JsonNode::textValue)
.map(d -> d.toUpperCase(Locale.ROOT))
.map(StringUtils::toUpperCase)
.map(SearchVersion.Distribution::valueOf)
.orElse(SearchVersion.Distribution.ELASTICSEARCH);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,6 @@

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.graylog2.indexer.IndexToolsAdapter;
import org.graylog2.plugin.Message;
import org.graylog2.plugin.streams.Stream;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.graylog.shaded.opensearch2.org.opensearch.action.search.SearchRequest;
import org.graylog.shaded.opensearch2.org.opensearch.action.search.SearchResponse;
import org.graylog.shaded.opensearch2.org.opensearch.action.support.IndicesOptions;
Expand All @@ -37,6 +32,11 @@
import org.graylog.shaded.opensearch2.org.opensearch.search.aggregations.bucket.histogram.ParsedDateHistogram;
import org.graylog.shaded.opensearch2.org.opensearch.search.aggregations.bucket.terms.Terms;
import org.graylog.shaded.opensearch2.org.opensearch.search.builder.SearchSourceBuilder;
import org.graylog2.indexer.IndexToolsAdapter;
import org.graylog2.plugin.Message;
import org.graylog2.plugin.streams.Stream;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import javax.inject.Inject;
import java.time.ZonedDateTime;
Expand Down Expand Up @@ -94,7 +94,7 @@ public Map<DateTime, Map<String, Long>> fieldHistogram(String fieldName, Set<Str

for (ParsedDateHistogram.ParsedBucket bucket : histogramBuckets) {
final ZonedDateTime zonedDateTime = (ZonedDateTime) bucket.getKey();
final DateTime date = new DateTime(zonedDateTime.toInstant().toEpochMilli()).toDateTime(DateTimeZone.UTC);
final DateTime date = new DateTime(zonedDateTime.toInstant().toEpochMilli(), DateTimeZone.UTC);

final Terms sourceFieldAgg = bucket.getAggregations().get(AGG_MESSAGE_FIELD);
final List<? extends Terms.Bucket> termBuckets = sourceFieldAgg.getBuckets();
Expand Down
Loading

0 comments on commit 03d7c5f

Please sign in to comment.