
Commit e05a2a3
Cleaning up diagnostic output.
piotrszul committed Feb 1, 2025
1 parent dd424c9 commit e05a2a3
Showing 15 changed files with 34 additions and 101 deletions.
@@ -73,9 +73,6 @@ public AggregateExecutor(@Nonnull final QueryConfiguration configuration,
   public AggregateResponse execute(@Nonnull final AggregateRequest query) {
     final ResultWithExpressions resultWithExpressions = buildQuery(
         query);
-    resultWithExpressions.getDataset().explain();
-    resultWithExpressions.getDataset().show(1_000, false);
-
     // Translate the result into a response object to be passed back to the user.
     return buildResponse(resultWithExpressions);
   }
@@ -105,8 +102,8 @@ private AggregateResponse buildResponse(
   @Nonnull
   @SuppressWarnings("unchecked")
   private Function<Row, AggregateResponse.Grouping> mapRowToGrouping(
-      @Nonnull final List<EvaluatedPath> aggregations, @Nonnull final List<EvaluatedPath> groupings,
-      @Nonnull final List<EvaluatedPath> filters) {
+      @Nonnull final List<EvaluatedPath> aggregations, @Nonnull final List<EvaluatedPath> groupings,
+      @Nonnull final List<EvaluatedPath> filters) {
     return row -> {
       final List<Optional<Type>> labels = new ArrayList<>();
       final List<Optional<Type>> results = new ArrayList<>();
@@ -230,8 +230,6 @@ public List<IBaseResource> getResources(final int theFromIndex, final int theToI
         .of(subjectResource.toCode());
     requireNonNull(encoder);
     reportQueryPlan(resources);
-    resources.explain();
-
     return resources.as(encoder).collectAsList();
   }
 
@@ -99,7 +99,6 @@ void simpleQueryWithLabels() {
     assertArrayEquals(new String[]{"patient_gender", "patient_count"},
         result.columns());
     assertThat(result)
-        .debugAllRows()
         .hasRows(spark, "responses/AggregateQueryExecutorTest/simpleQuery.tsv");
   }
 
@@ -116,7 +115,6 @@ void simpleQueryWithNoLabels() {
     final Dataset<Row> result = executor.buildQuery(request).getDataset();
     //assertTrue(Stream.of(result.columns()).allMatch(Strings::looksLikeAlias));
     assertThat(result)
-        .debugAllRows()
         .hasRows(spark, "responses/AggregateQueryExecutorTest/simpleQuery.tsv");
   }
 
@@ -169,7 +169,6 @@ void multiplePolymorphicResolves() {
 
     final Dataset<Row> result = executor.buildQuery(request, ProjectionConstraint.FLAT);
     assertThat(result)
-        .debugAllRows()
         .hasRows(spark, "responses/ExtractQueryTest/multiplePolymorphicResolves.tsv");
   }
 
@@ -469,7 +468,6 @@ void structuredResult() {
     );
 
     assertThat(result)
-        .debugAllRows()
         .hasRows(spark, "responses/ExtractQueryTest/structuredResult.tsv");
   }
 
@@ -589,7 +589,6 @@ void testDesignationFunctionWithNoLanguage() {
     assertThatResultOf(ResourceType.CONDITION,
         "code.coding.designation(http://terminology.hl7.org/CodeSystem/designation-usage|display)")
         .selectOrderedResult()
-        .debugAllRows()
         .hasRows(spark, "responses/ParserTest/testDesignationFunctionWithNoLanguage.tsv");
   }
 
fhir-server/src/test/resources/logback-test.xml (2 changes: 1 addition & 1 deletion)
@@ -22,7 +22,7 @@
       <pattern>[%level] %logger{36} - %msg%n</pattern>
     </encoder>
   </appender>
-  <logger level="DEBUG" name="au.csiro"/>
+  <logger level="INFO" name="au.csiro"/>
   <logger level="ERROR" name="org.apache.hadoop.metrics2"/>
   <logger level="ERROR" name="org.apache.spark.sql.execution.CacheManager"/>
   <logger level="ERROR" name="org.apache.spark.sql.catalyst.util.SparkStringUtils"/>
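Raising the au.csiro logger from DEBUG to INFO in this test configuration means the log.debug(...) calls introduced elsewhere in the commit stay silent during test runs. When the verbose diagnostics are needed for a one-off investigation, the level can be lowered again programmatically instead of editing the file. A minimal sketch, assuming Logback Classic as the SLF4J backend behind logback-test.xml; the logger name comes from the configuration above, and the helper class is purely illustrative:

    import ch.qos.logback.classic.Level;
    import ch.qos.logback.classic.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative helper, not part of the commit: re-enables the package-level
    // DEBUG output that logback-test.xml now suppresses by default.
    final class DebugLoggingToggle {

      static void enablePathlingDebugLogging() {
        // Logback's Logger implements org.slf4j.Logger, so the cast is safe when
        // Logback Classic is the active binding.
        final Logger logger = (Logger) LoggerFactory.getLogger("au.csiro");
        logger.setLevel(Level.DEBUG);
      }
    }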
@@ -23,9 +23,11 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import lombok.Value;
+import lombok.extern.slf4j.Slf4j;
 import org.hl7.fhir.r4.model.Enumerations.ResourceType;
 
 @Value
+@Slf4j
 public class DataRootResolver {
 
   ResourceType subjectResource;
@@ -108,7 +110,7 @@ public void collectDataRoots(@Nonnull final DataRoot currentRoot,
       }
     } else if (headPath instanceof Paths.ExternalConstantPath ecp) {
       // we do not need to do anything here
-      System.out.println("External constant path" + ecp);
+      log.debug("External constant path: {}", ecp);
       if ("resource".equals(ecp.getName()) || "rootResource".equals(ecp.getName())) {
         // this root should already be addded here
         collectDataRoots(ResourceRoot.of(subjectResource), fhirPath.suffix(), FhirPath.nullPath(),
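The log field referenced by the new log.debug call comes from Lombok's @Slf4j annotation added in the hunk above: it generates a private static final org.slf4j.Logger named after the class, and the parameterized {} form only builds the message when DEBUG is enabled for that logger. A minimal sketch of the same pattern, using a hypothetical class name:

    import lombok.extern.slf4j.Slf4j;

    // Hypothetical example class, not part of the commit.
    @Slf4j
    class PathDiagnostics {

      void reportExternalConstant(final Object path) {
        // @Slf4j generates roughly:
        //   private static final org.slf4j.Logger log =
        //       org.slf4j.LoggerFactory.getLogger(PathDiagnostics.class);
        // The message is only formatted when DEBUG is enabled for this logger.
        log.debug("External constant path: {}", path);
      }
    }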
@@ -28,9 +28,7 @@ public static ResourceCollection reverseResolve(@Nonnull final ResourceCollectio
     final ReverseResolveRoot root = ReverseResolveRoot.ofResource(input.getResourceType(),
         childResourceType, childPath);
 
-    log.info("Reverse resolve root: {}", root);
-    System.out.println("Reverse resolve root: " + root);
-
+    log.debug("Reverse resolve root: {}", root);
     return evaluationContext.resolveReverseJoin(input, subjectPath.toExpression());
   }
 
@@ -64,8 +64,6 @@ void simple() {
     ), Collections.emptyList(), Optional.empty());
 
     final Dataset<Row> result = executor.buildQuery(request);
-    result.show();
-
     final Dataset<Row> expected = DatasetBuilder.of(spark)
         .withColumn("id", DataTypes.StringType)
         .withColumn("given_name", DataTypes.StringType)
@@ -66,7 +66,6 @@ void check() {
     for (int i = 0; i < result.size(); i++) {
       assertTrue(result.getBoolean(i), "Test " + i + " failed: " + tests.get(i));
     }
-    System.out.println(result);
   }
 }
 
@@ -116,13 +115,6 @@ void testSingular() {
         .assertEquals(13, valueOf(13).singular())
         .assertEquals("a", arrayOfOne("a").singular())
         .check();
-
-    // final SparkException ex = assertThrows(SparkException.class, () ->
-    //     spark.range(1).select(
-    //         ColumnHelpers.singular(functions.array(functions.lit("a"), functions.lit("b")))
-    //     ).collect());
-    //     System.out.println(ex.getCause().getMessage());
-
   }
 
   @Test