Skip to content

Commit d48c536

Browse files
Fixing issues in winutils on Windows (Azure#34206)
* Fixing issues in winutils on Windows * Update external_dependencies.txt
1 parent 3de9b49 commit d48c536

File tree

12 files changed

+74
-10
lines changed

12 files changed

+74
-10
lines changed

eng/versioning/external_dependencies.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -353,6 +353,7 @@ cosmos_org.scalatest:scalatest_2.12;3.2.2
353353
cosmos_org.scalatest:scalatest-flatspec_2.12;3.2.3
354354
cosmos_org.scalactic:scalactic_2.12;3.2.3
355355
cosmos_org.scalamock:scalamock_2.12;5.0.0
356+
cosmos_com.globalmentor:hadoop-bare-naked-local-fs;0.1.0
356357

357358
# Maven Tools for Cosmos Spark connector only
358359
cosmos_org.scalatest:scalatest-maven-plugin;2.0.2

sdk/cosmos/azure-cosmos-spark_3_2-12/pom.xml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,12 @@
110110
</dependency>
111111

112112
<!-- Test -->
113+
<dependency>
114+
<groupId>com.globalmentor</groupId>
115+
<artifactId>hadoop-bare-naked-local-fs</artifactId>
116+
<version>0.1.0</version> <!-- {x-version-update;cosmos_com.globalmentor:hadoop-bare-naked-local-fs;external_dependency} -->
117+
<scope>test</scope>
118+
</dependency>
113119
<dependency>
114120
<groupId>org.mockito</groupId>
115121
<artifactId>mockito-core</artifactId>
@@ -334,6 +340,7 @@
334340
<configuration>
335341
<source>11</source>
336342
<target>11</target>
343+
<scalaVersion>2.12.10</scalaVersion>
337344
</configuration>
338345
<executions>
339346
<execution>

sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosCatalogBase.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ class CosmosCatalogBase
5252

5353
/**
5454
* Called to initialize configuration.
55-
* <p>
55+
* <br/>
5656
* This method is called once, just after the provider is instantiated.
5757
*
5858
* @param name the name used to identify and load this catalog
@@ -91,7 +91,7 @@ class CosmosCatalogBase
9191

9292
/**
9393
* List top-level namespaces from the catalog.
94-
* <p>
94+
* <br/>
9595
* If an object such as a table, view, or function exists, its parent namespaces must also exist
9696
* and must be returned by this discovery method. For example, if table a.t exists, this method
9797
* must return ["a"] in the result array.
@@ -128,7 +128,7 @@ class CosmosCatalogBase
128128

129129
/**
130130
* List namespaces in a namespace.
131-
* <p>
131+
* <br/>
132132
* Cosmos supports only single depth database. Hence we always return an empty list of namespaces.
133133
* or throw if the root namespace doesn't exist
134134
*/

sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/CosmosCatalogITestBase.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,8 @@ abstract class CosmosCatalogITestBase extends IntegrationSpec with CosmosClient
3131
.enableHiveSupport()
3232
.getOrCreate()
3333

34+
LocalJavaFileSystem.applyToSparkSession(spark)
35+
3436
spark.conf.set(s"spark.sql.catalog.testCatalog", "com.azure.cosmos.spark.CosmosCatalog")
3537
spark.conf.set(s"spark.sql.catalog.testCatalog.spark.cosmos.accountEndpoint", cosmosEndpoint)
3638
spark.conf.set(s"spark.sql.catalog.testCatalog.spark.cosmos.accountKey", cosmosMasterKey)

sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SampleCosmosCatalogE2EMain.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,9 @@ object SampleCosmosCatalogE2EMain {
3333
.appName("spark connector sample")
3434
.master("local")
3535
.getOrCreate()
36+
37+
LocalJavaFileSystem.applyToSparkSession(spark)
38+
3639
spark.conf.set(s"spark.sql.catalog.mycatalog", "com.azure.cosmos.spark.CosmosCatalog")
3740
spark.conf.set(s"spark.sql.catalog.mycatalog.spark.cosmos.accountEndpoint", cosmosEndpoint)
3841
spark.conf.set(s"spark.sql.catalog.mycatalog.spark.cosmos.accountKey", cosmosMasterKey)

sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SampleE2EMain.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,8 @@ object SampleE2EMain {
3434
.master("local")
3535
.getOrCreate()
3636

37+
LocalJavaFileSystem.applyToSparkSession(spark)
38+
3739
// scalastyle:off underscore.import
3840
// scalastyle:off import.grouping
3941
import spark.implicits._

sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SampleReadE2EMain.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,8 @@ object SampleReadE2EMain {
3737
.master("local")
3838
.getOrCreate()
3939

40+
LocalJavaFileSystem.applyToSparkSession(spark)
41+
4042
val df = spark.read.format("cosmos.oltp").options(cfg).load()
4143
df.show(numRows = 10)
4244

sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SampleStructuredStreamingE2EMain.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,8 @@ object SampleStructuredStreamingE2EMain {
3131
.master("local")
3232
.getOrCreate()
3333

34+
LocalJavaFileSystem.applyToSparkSession(spark)
35+
3436
spark.streams.addListener(new StreamingQueryListener() {
3537
override def onQueryStarted(queryStarted: QueryStartedEvent): Unit = {
3638
println("Query started: " + queryStarted.id)

sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EConfigResolutionITest.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@ class SparkE2EConfigResolutionITest extends IntegrationSpec with CosmosClient wi
2626
.config(sparkConfig)
2727
.getOrCreate()
2828

29+
LocalJavaFileSystem.applyToSparkSession(spark)
30+
2931
// scalastyle:off underscore.import
3032
// scalastyle:off import.grouping
3133
import spark.implicits._

sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EQueryITestBase.scala

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,11 +76,15 @@ abstract class SparkE2EQueryITestBase
7676
item.getAs[String]("id") shouldEqual id
7777

7878
assertMetrics(meterRegistry, "cosmos.client.op.latency", expectedToFind = true)
79+
7980
// Gateway requests do not always happen - but they can happen
8081
//assertMetrics(meterRegistry, "cosmos.client.req.gw", expectedToFind = true)
82+
8183
assertMetrics(meterRegistry, "cosmos.client.req.rntbd", expectedToFind = true)
8284
assertMetrics(meterRegistry, "cosmos.client.rntbd", expectedToFind = true)
83-
assertMetrics(meterRegistry, "cosmos.client.rntbd.addressResolution", expectedToFind = true)
85+
86+
// address resolution requests may or may not happen - they are optional
87+
// assertMetrics(meterRegistry, "cosmos.client.rntbd.addressResolution", expectedToFind = true)
8488
}
8589

8690
private def insertDummyValue() : Unit = {

0 commit comments

Comments
 (0)