ID: java-code-style/avoid-dollar-signs
Language: Java
Severity: Notice
Category: Code Style
Avoid using dollar signs in identifiers, as auto-generated names containing dollar signs ($) may lead to unexpected issues that can be difficult to diagnose.
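The reason is that the Java compiler uses the dollar sign in the names it generates itself. As a minimal sketch (the Outer and Inner class names below are illustrative and not part of the rule), a nested class is compiled to a binary name that already contains a dollar sign, so a hand-written identifier containing one can collide with or be mistaken for a generated name:

// Illustrative only: compiling this file produces Outer.class and Outer$Inner.class.
public class Outer {
  static class Inner {}

  public static void main(String[] args) {
    // The binary name returned by getName() contains a compiler-inserted dollar sign.
    System.out.println(Inner.class.getName()); // prints "Outer$Inner"
  }
}

Hand-written names such as the bar$ field or ba$() method shown below are therefore easy to confuse with compiler output, which is the kind of hard-to-diagnose issue this rule warns about.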
Non-Compliant Code Examples
public class $Foo {}

public class Fo$o {}

public class Foo$ {
  String bar$ = "bar";

  public String ba$() {
    return "baz";
  }
}
Compliant Code Examples
/*
/* Copyright 2018-2024 contributors to the OpenLineage project
/* SPDX-License-Identifier: Apache-2.0
*/
package io.openlineage.spark3.agent.lifecycle.plan.column;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.mockStatic;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageBuilder;
import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageContext;
import io.openlineage.spark.agent.util.ScalaConversionUtils;
import io.openlineage.sql.ColumnMeta;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.LongAccumulator;
import org.apache.spark.sql.catalyst.expressions.Attribute;
import org.apache.spark.sql.catalyst.expressions.AttributeReference;
import org.apache.spark.sql.catalyst.expressions.ExprId;
import org.apache.spark.sql.catalyst.expressions.NamedExpression;
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap$;
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions;
import org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation;
import org.apache.spark.sql.types.IntegerType$;
import org.apache.spark.sql.types.Metadata$;
import org.apache.spark.sql.types.StringType$;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.MockedStatic;
import org.mockito.stubbing.Answer;
class JdbcColumnLineageExpressionCollectorTest {

  ColumnLevelLineageBuilder builder = mock(ColumnLevelLineageBuilder.class);
  ColumnLevelLineageContext context = mock(ColumnLevelLineageContext.class);

  ExprId exprId1 = ExprId.apply(20);
  ExprId exprId2 = ExprId.apply(21);
  ExprId dependencyId1 = ExprId.apply(0);
  ExprId dependencyId2 = ExprId.apply(1);

  Attribute expression1 =
      new AttributeReference(
          "k", IntegerType$.MODULE$, false, Metadata$.MODULE$.empty(), exprId1, null);
  Attribute expression2 =
      new AttributeReference(
          "j", StringType$.MODULE$, false, Metadata$.MODULE$.empty(), exprId2, null);

  JDBCRelation relation = mock(JDBCRelation.class);
  JDBCOptions jdbcOptions = mock(JDBCOptions.class);

  String jdbcQuery =
      "(select js1.k, CONCAT(js1.j1, js2.j2) as j from jdbc_source1 js1 join jdbc_source2 js2 on js1.k = js2.k) SPARK_GEN_SUBQ_0";
  String invalidJdbcQuery = "(INVALID) SPARK_GEN_SUBQ_0";
  String url = "jdbc:postgresql://localhost:5432/test";

  Map<ColumnMeta, ExprId> mockMap = new HashMap<>();

  @BeforeEach
  void setup() {
    when(relation.jdbcOptions()).thenReturn(jdbcOptions);
    scala.collection.immutable.Map<String, String> properties =
        ScalaConversionUtils.<String, String>asScalaMapEmpty();
    when(jdbcOptions.parameters())
        .thenReturn(CaseInsensitiveMap$.MODULE$.<String>apply(properties));
    when(context.getBuilder()).thenReturn(builder);
  }

  @Test
  void testInputCollection() {
    when(jdbcOptions.tableOrQuery()).thenReturn(jdbcQuery);
    when(jdbcOptions.url()).thenReturn(url);

    final LongAccumulator id = new LongAccumulator(Long::sum, 0L);
    try (MockedStatic<NamedExpression> utilities = mockStatic(NamedExpression.class)) {
      utilities
          .when(NamedExpression::newExprId)
          .thenAnswer(
              (Answer<ExprId>)
                  invocation -> {
                    ExprId exprId = ExprId.apply(id.get());
                    id.accumulate(1);
                    return exprId;
                  });

      doAnswer(
              invocation ->
                  mockMap.putIfAbsent(invocation.getArgument(0), invocation.getArgument(1)))
          .when(builder)
          .addExternalMapping(any(ColumnMeta.class), any(ExprId.class));
      when(builder.getMapping(any(ColumnMeta.class)))
          .thenAnswer(invocation -> mockMap.get(invocation.getArgument(0)));

      JdbcColumnLineageCollector.extractExpressionsFromJDBC(
          context, relation, Arrays.asList(expression1, expression2));

      verify(builder, times(1)).addDependency(exprId2, dependencyId1);
      verify(builder, times(1)).addDependency(exprId2, dependencyId2);
      utilities.verify(NamedExpression::newExprId, times(3));
    }
  }

  @Test
  void testInvalidQuery() {
    when(jdbcOptions.tableOrQuery()).thenReturn(invalidJdbcQuery);
    when(jdbcOptions.url()).thenReturn(url);

    JdbcColumnLineageCollector.extractExpressionsFromJDBC(
        context, relation, Arrays.asList(expression1, expression2));

    verify(builder, never()).addDependency(any(ExprId.class), any(ExprId.class));
  }
}
public class Foo { }