From e07b94a6c33fab51902c0ae8b7c6726d893964f9 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Tue, 2 Jan 2024 14:38:20 +0000 Subject: [PATCH 01/44] Add support for ST_ConcaveHull. --- pom.xml | 2 +- python/mosaic/api/functions.py | 64 +++++++++--- python/test/test_vector_functions.py | 1 + .../mosaic/core/geometry/MosaicGeometry.scala | 3 + .../core/geometry/MosaicGeometryESRI.scala | 0 .../core/geometry/MosaicGeometryJTS.scala | 7 ++ .../expressions/geometry/ST_ConcaveHull.scala | 77 +++++++++++++++ .../labs/mosaic/functions/MosaicContext.scala | 5 + .../geometry/ST_ConcaveHullBehaviors.scala | 99 +++++++++++++++++++ .../geometry/ST_ConcaveHullTest.scala | 34 +++++++ 10 files changed, 277 insertions(+), 15 deletions(-) delete mode 100644 src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryESRI.scala create mode 100644 src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala create mode 100644 src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullBehaviors.scala create mode 100644 src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullTest.scala diff --git a/pom.xml b/pom.xml index f0759de70..699f74b68 100644 --- a/pom.xml +++ b/pom.xml @@ -93,7 +93,7 @@ h3 - 3.7.0 + 3.7.3 org.locationtech.jts diff --git a/python/mosaic/api/functions.py b/python/mosaic/api/functions.py index fff13d493..7cd7c8b84 100644 --- a/python/mosaic/api/functions.py +++ b/python/mosaic/api/functions.py @@ -16,6 +16,7 @@ "st_length", "st_perimeter", "st_convexhull", + "st_concavehull", "st_buffer", "st_bufferloop", "st_dimension", @@ -154,6 +155,41 @@ def st_convexhull(geom: ColumnOrName) -> Column: ) +def st_concavehull(geom: ColumnOrName, concavity: ColumnOrName, has_holes: ColumnOrName = lit(False)) -> Column: + """ + Compute the concave hull of a geometry or multi-geometry object. + It uses lengthRatio and + allowHoles to determine the concave hull. lengthRatio is the ratio of the + length of the concave hull to the length of the convex hull. If set to 1, + this is the same as the convex hull. If set to 0, this is the same as the + bounding box. AllowHoles is a boolean that determines whether the concave + hull can have holes. If set to true, the concave hull can have holes. If set + to false, the concave hull will not have holes. (For PostGIS, the default is + false.) + + Parameters + ---------- + geom : Column + The input geometry + concavity : Column + The concavity of the hull + has_holes : Column + Whether the hull has holes + + Returns + ------- + Column + A polygon + + """ + return config.mosaic_context.invoke_function( + "st_concavehull", + pyspark_to_java_column(geom), + pyspark_to_java_column(concavity), + pyspark_to_java_column(has_holes) + ) + + def st_buffer(geom: ColumnOrName, radius: ColumnOrName) -> Column: """ Compute the buffered geometry based on geom and radius. @@ -177,7 +213,7 @@ def st_buffer(geom: ColumnOrName, radius: ColumnOrName) -> Column: def st_bufferloop( - geom: ColumnOrName, inner_radius: ColumnOrName, outer_radius: ColumnOrName + geom: ColumnOrName, inner_radius: ColumnOrName, outer_radius: ColumnOrName ) -> Column: """ Compute the buffered geometry loop (hollow ring) based on geom and provided radius-es. 
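For reference, a minimal pyspark sketch of calling the st_concavehull binding added above; the enable_mosaic setup call and the sample MULTIPOINT are illustrative assumptions, not part of this patch:

    from pyspark.sql.functions import lit

    import mosaic as mos

    mos.enable_mosaic(spark, dbutils)  # assumes a Databricks-style session with dbutils

    df = spark.createDataFrame(
        [("MULTIPOINT (-70 35, -72 40, -78 40, -80 45, -70 45, -80 35)",)], ["wkt"]
    )

    # concavity (lengthRatio) of 0.1; has_holes defaults to lit(False), matching PostGIS
    df.select(mos.st_concavehull("wkt", lit(0.1))).show(truncate=False)
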
@@ -323,7 +359,7 @@ def st_transform(geom: ColumnOrName, srid: ColumnOrName) -> Column: def st_hasvalidcoordinates( - geom: ColumnOrName, crs: ColumnOrName, which: ColumnOrName + geom: ColumnOrName, crs: ColumnOrName, which: ColumnOrName ) -> Column: """ Checks if all points in geometry are valid with respect to crs bounds. @@ -530,7 +566,7 @@ def st_distance(geom1: ColumnOrName, geom2: ColumnOrName) -> Column: def st_haversine( - lat1: ColumnOrName, lng1: ColumnOrName, lat2: ColumnOrName, lng2: ColumnOrName + lat1: ColumnOrName, lng1: ColumnOrName, lat2: ColumnOrName, lng2: ColumnOrName ) -> Column: """ Compute the haversine distance in kilometers between two latitude/longitude pairs. @@ -682,7 +718,7 @@ def st_unaryunion(geom: ColumnOrName) -> Column: def st_updatesrid( - geom: ColumnOrName, srcSRID: ColumnOrName, destSRID: ColumnOrName + geom: ColumnOrName, srcSRID: ColumnOrName, destSRID: ColumnOrName ) -> Column: """ Updates the SRID of the input geometry `geom` from `srcSRID` to `destSRID`. @@ -951,7 +987,7 @@ def grid_boundary(index_id: ColumnOrName, format_name: ColumnOrName) -> Column: def grid_longlatascellid( - lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName + lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName ) -> Column: """ Returns the grid's cell ID associated with the input `lng` and `lat` coordinates at a given grid `resolution`. @@ -1019,7 +1055,7 @@ def grid_polyfill(geom: ColumnOrName, resolution: ColumnOrName) -> Column: def grid_tessellate( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ Generates: @@ -1054,7 +1090,7 @@ def grid_tessellate( def grid_tessellateexplode( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ Generates: @@ -1214,7 +1250,7 @@ def grid_cellkloopexplode(cellid: ColumnOrName, k: ColumnOrName) -> Column: def grid_geometrykring( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the k-ring of cells around the input geometry. @@ -1239,7 +1275,7 @@ def grid_geometrykring( def grid_geometrykloop( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the k loop (hollow ring) of cells around the input geometry. @@ -1264,7 +1300,7 @@ def grid_geometrykloop( def grid_geometrykringexplode( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the exploded k-ring of cells around the input geometry. @@ -1289,7 +1325,7 @@ def grid_geometrykringexplode( def grid_geometrykloopexplode( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the exploded k loop (hollow ring) of cells around the input geometry. 
@@ -1336,7 +1372,7 @@ def point_index_geom(geom: ColumnOrName, resolution: ColumnOrName) -> Column: def point_index_lonlat( - lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName + lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName ) -> Column: """ [Deprecated] alias for `grid_longlatascellid` @@ -1393,7 +1429,7 @@ def polyfill(geom: ColumnOrName, resolution: ColumnOrName) -> Column: def mosaic_explode( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ [Deprecated] alias for `grid_tessellateexplode` @@ -1428,7 +1464,7 @@ def mosaic_explode( def mosaicfill( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ [Deprecated] alias for `grid_tessellate` diff --git a/python/test/test_vector_functions.py b/python/test/test_vector_functions.py index 2d189caba..a9b5382af 100644 --- a/python/test/test_vector_functions.py +++ b/python/test/test_vector_functions.py @@ -42,6 +42,7 @@ def test_st_bindings_happy_flow(self): .withColumn("st_buffer", api.st_bufferloop("wkt", lit(1.1), lit(1.2))) .withColumn("st_perimeter", api.st_perimeter("wkt")) .withColumn("st_convexhull", api.st_convexhull("wkt")) + .withColumn("st_concavehull", api.st_concavehull("wkt", lit(0.5))) .withColumn("st_dump", api.st_dump("wkt")) .withColumn("st_translate", api.st_translate("wkt", lit(1), lit(1))) .withColumn("st_scale", api.st_scale("wkt", lit(1), lit(1))) diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala index 8af8c9996..1013d811a 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala @@ -95,6 +95,9 @@ trait MosaicGeometry extends GeometryWriter with Serializable { def convexHull: MosaicGeometry + // Allow holes is set to false by default to match the behavior of the POSTGIS implementation + def concaveHull(lengthRatio: Double, allow_holes: Boolean = false): MosaicGeometry + def minMaxCoord(dimension: String, func: String): Double = { val coordArray = this.getShellPoints.map(shell => { val unitArray = dimension.toUpperCase(Locale.ROOT) match { diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryESRI.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryESRI.scala deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala index 17960d423..5c1d44e37 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala @@ -11,6 +11,7 @@ import com.databricks.labs.mosaic.core.types.model.GeometryTypeEnum import com.databricks.labs.mosaic.core.types.model.GeometryTypeEnum._ import com.esotericsoftware.kryo.Kryo import org.apache.spark.sql.catalyst.InternalRow +import org.locationtech.jts.algorithm.hull.ConcaveHull import org.locationtech.jts.geom.{Geometry, GeometryCollection, GeometryFactory} import org.locationtech.jts.geom.util.AffineTransformation import org.locationtech.jts.io._ @@ -180,6 
+181,12 @@ abstract class MosaicGeometryJTS(geom: Geometry) extends MosaicGeometry { MosaicGeometryJTS(convexHull) } + override def concaveHull(lengthRatio: Double, allow_holes: Boolean = false): MosaicGeometryJTS = { + val concaveHull = ConcaveHull.concaveHullByLengthRatio(geom, lengthRatio, allow_holes) + concaveHull.setSRID(geom.getSRID) + MosaicGeometryJTS(concaveHull) + } + override def unaryUnion: MosaicGeometryJTS = { val unaryUnion = geom.union() unaryUnion.setSRID(geom.getSRID) diff --git a/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala new file mode 100644 index 000000000..6bf602a5b --- /dev/null +++ b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala @@ -0,0 +1,77 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.MosaicGeometry +import com.databricks.labs.mosaic.expressions.base.{GenericExpressionFactory, WithExpressionInfo} +import com.databricks.labs.mosaic.expressions.geometry.base.UnaryVector2ArgExpression +import com.databricks.labs.mosaic.functions.MosaicExpressionConfig +import org.apache.spark.sql.adapters.Column +import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder +import org.apache.spark.sql.catalyst.expressions.Expression +import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext +import org.apache.spark.sql.types.DataType + +/** + * Returns the concave hull for a given geometry. It uses lengthRatio and + * allowHoles to determine the concave hull. lengthRatio is the ratio of the + * length of the concave hull to the length of the convex hull. If set to 1, + * this is the same as the convex hull. If set to 0, this is the same as the + * bounding box. AllowHoles is a boolean that determines whether the concave + * hull can have holes. If set to true, the concave hull can have holes. If set + * to false, the concave hull will not have holes. (For PostGIS, the default is + * false.) + * @param inputGeom + * The input geometry. + * @param expressionConfig + * Additional arguments for the expression (expressionConfigs). + */ +case class ST_ConcaveHull( + inputGeom: Expression, + lengthRatio: Expression, + allowHoles: Expression, + expressionConfig: MosaicExpressionConfig +) extends UnaryVector2ArgExpression[ST_ConcaveHull]( + inputGeom, + lengthRatio, + allowHoles, + returnsGeometry = true, + expressionConfig + ) { + + override def dataType: DataType = inputGeom.dataType + + override def geometryTransform(geometry: MosaicGeometry, arg1: Any, arg2: Any): Any = { + val lenRatio = arg1.asInstanceOf[Double] + val allowHoles = arg2.asInstanceOf[Boolean] + geometry.concaveHull(lenRatio, allowHoles) + } + + override def geometryCodeGen(geometryRef: String, arg1Ref: String, arg2Ref: String, ctx: CodegenContext): (String, String) = { + val convexHull = ctx.freshName("concaveHull") + val code = s"""$mosaicGeomClass $convexHull = $geometryRef.concaveHull($arg1Ref, $arg2Ref);""" + (code, convexHull) + } + +} + +/** Expression info required for the expression registration for spark SQL. */ +object ST_ConcaveHull extends WithExpressionInfo { + + override def name: String = "st_concavehull" + + override def usage: String = "_FUNC_(expr1, expr2, expr3) - Returns the concave hull for a given geometry with or without holes." 
+ + override def example: String = + """ + | Examples: + | > SELECT _FUNC_(a, 0.1, false); + | {"POLYGON (( 0 0, 1 0, 1 1, 0 1 ))"} + | """.stripMargin + + override def builder(expressionConfig: MosaicExpressionConfig): FunctionBuilder = { (children: Seq[Expression]) => + GenericExpressionFactory.construct[ST_ConcaveHull]( + Array(children.head, Column(children(1)).cast("double").expr, children(2)), + expressionConfig + ) + } + +} diff --git a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala index 8e483c702..0714b728c 100644 --- a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala +++ b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala @@ -149,6 +149,7 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends mosaicRegistry.registerExpression[ST_Centroid](expressionConfig) mosaicRegistry.registerExpression[ST_Contains](expressionConfig) mosaicRegistry.registerExpression[ST_ConvexHull](expressionConfig) + mosaicRegistry.registerExpression[ST_ConcaveHull](expressionConfig) mosaicRegistry.registerExpression[ST_Distance](expressionConfig) mosaicRegistry.registerExpression[ST_Difference](expressionConfig) mosaicRegistry.registerExpression[ST_Dimension](expressionConfig) @@ -558,6 +559,10 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends ColumnAdapter(ST_BufferCapStyle(geom.expr, lit(radius).cast("double").expr, lit(capStyle).expr, expressionConfig)) def st_centroid(geom: Column): Column = ColumnAdapter(ST_Centroid(geom.expr, expressionConfig)) def st_convexhull(geom: Column): Column = ColumnAdapter(ST_ConvexHull(geom.expr, expressionConfig)) + def st_concavehull(geom: Column, concavity: Column, allowHoles: Column): Column = + ColumnAdapter(ST_ConcaveHull(geom.expr, concavity.cast("double").expr, allowHoles.expr, expressionConfig)) + def st_concavehull(geom: Column, concavity: Double, allowHoles: Boolean = false): Column = + ColumnAdapter(ST_ConcaveHull(geom.expr, lit(concavity).cast("double").expr, lit(allowHoles).expr, expressionConfig)) def st_difference(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Difference(geom1.expr, geom2.expr, expressionConfig)) def st_distance(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Distance(geom1.expr, geom2.expr, expressionConfig)) def st_dimension(geom: Column): Column = ColumnAdapter(ST_Dimension(geom.expr, expressionConfig)) diff --git a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullBehaviors.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullBehaviors.scala new file mode 100644 index 000000000..1d75bc146 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullBehaviors.scala @@ -0,0 +1,99 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.api.GeometryAPI +import com.databricks.labs.mosaic.core.index._ +import com.databricks.labs.mosaic.functions.MosaicContext +import org.apache.spark.sql.QueryTest +import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenerator, CodegenContext} +import org.apache.spark.sql.execution.WholeStageCodegenExec +import org.apache.spark.sql.functions.lit +import org.scalatest.matchers.must.Matchers.noException +import org.scalatest.matchers.should.Matchers.{an, be, convertToAnyShouldWrapper} + +trait ST_ConcaveHullBehaviors extends QueryTest { 
+ + def concaveHullBehavior(indexSystem: IndexSystem, geometryAPI: GeometryAPI): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = MosaicContext.build(indexSystem, geometryAPI) + import mc.functions._ + val sc = spark + import sc.implicits._ + mc.register(spark) + + val multiPoint = List("MULTIPOINT (-70 35, -72 40, -78 40, -80 45, -70 45, -80 35)") + val expected = List("POLYGON ((-78 40, -80 45, -72 40, -70 45, -70 35, -80 35, -78 40))") + .map(mc.getGeometryAPI.geometry(_, "WKT")) + + val results = multiPoint + .toDF("multiPoint") + .crossJoin(multiPoint.toDF("other")) + .withColumn("result", st_concavehull($"multiPoint", 0.1)) + .select($"result") + .as[String] + .collect() + .map(mc.getGeometryAPI.geometry(_, "WKT")) + + results.zip(expected).foreach { case (l, r) => l.equals(r) shouldEqual true } + + noException should be thrownBy multiPoint.toDF("multiPoint") + .withColumn("result", st_concavehull($"multiPoint", 0.01, allowHoles = true)) + .select($"result") + .as[String] + .collect() + + multiPoint.toDF("multiPoint").createOrReplaceTempView("multiPoint") + + spark.sql("SELECT ST_ConcaveHull(multiPoint, 0.01, true) FROM multiPoint") + .as[String] + .collect() + + } + + def concaveHullCodegen(indexSystem: IndexSystem, geometryAPI: GeometryAPI): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = MosaicContext.build(indexSystem, geometryAPI) + val sc = spark + import mc.functions._ + import sc.implicits._ + mc.register(spark) + + val multiPoint = List("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").toDF("multiPoint") + + val result = multiPoint + .withColumn("result", st_concavehull($"multiPoint", 0.01)) + .select(st_asbinary($"result")) + + val queryExecution = result.queryExecution + val plan = queryExecution.executedPlan + + val wholeStageCodegenExec = plan.find(_.isInstanceOf[WholeStageCodegenExec]) + + wholeStageCodegenExec.isDefined shouldBe true + + val codeGenStage = wholeStageCodegenExec.get.asInstanceOf[WholeStageCodegenExec] + val (_, code) = codeGenStage.doCodeGen() + + noException should be thrownBy CodeGenerator.compile(code) + + val stConvexHull = ST_ConvexHull(lit(1).expr, mc.expressionConfig) + val ctx = new CodegenContext + an[Error] should be thrownBy stConvexHull.genCode(ctx) + } + + def auxiliaryMethods(indexSystem: IndexSystem, geometryAPI: GeometryAPI): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = MosaicContext.build(indexSystem, geometryAPI) + mc.register(spark) + + val stConcaveHull = ST_ConcaveHull(lit("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").expr, lit(0.01).expr, lit(true).expr, mc.expressionConfig) + + stConcaveHull.children.length shouldEqual 3 + stConcaveHull.first shouldEqual lit("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").expr + stConcaveHull.second shouldEqual lit(0.01).expr + stConcaveHull.third shouldEqual lit(true).expr + + stConcaveHull.makeCopy(Array(lit("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").expr, lit(0.01).expr, lit(true).expr)) shouldEqual stConcaveHull + + } + +} diff --git a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullTest.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullTest.scala new file mode 100644 index 000000000..36e5a3023 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullTest.scala @@ -0,0 +1,34 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.api.JTS +import 
com.databricks.labs.mosaic.core.index.{BNGIndexSystem, H3IndexSystem} +import org.apache.spark.sql.QueryTest +import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode +import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.sql.test.SharedSparkSession + +class ST_ConcaveHullTest extends QueryTest with SharedSparkSession with ST_ConcaveHullBehaviors { + + private val noCodegen = + withSQLConf( + SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false", + SQLConf.CODEGEN_FACTORY_MODE.key -> CodegenObjectFactoryMode.NO_CODEGEN.toString + ) _ + + private val codegenOnly = + withSQLConf( + SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false", + SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "true", + SQLConf.CODEGEN_FACTORY_MODE.key -> CodegenObjectFactoryMode.CODEGEN_ONLY.toString + ) _ + + test("Testing ST_ConcaveHull (H3, JTS) NO_CODEGEN") { noCodegen { concaveHullBehavior(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull (BNG, JTS) NO_CODEGEN") { noCodegen { concaveHullBehavior(BNGIndexSystem, JTS) } } + test("Testing ST_ConcaveHull (H3, JTS) CODEGEN compilation") { codegenOnly { concaveHullCodegen(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull (BNG, JTS) CODEGEN compilation") { codegenOnly { concaveHullCodegen(BNGIndexSystem, JTS) } } + test("Testing ST_ConcaveHull (H3, JTS) CODEGEN_ONLY") { codegenOnly { concaveHullBehavior(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull (BNG, JTS) CODEGEN_ONLY") { codegenOnly { concaveHullBehavior(BNGIndexSystem, JTS) } } + test("Testing ST_ConcaveHull auxiliaryMethods (H3, JTS)") { noCodegen { auxiliaryMethods(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull auxiliaryMethods (BNG, JTS)") { noCodegen { auxiliaryMethods(BNGIndexSystem, JTS) } } + +} From 0bdae01eb63396988f3a7b82907828fcfc4a216f Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Tue, 2 Jan 2024 14:40:33 +0000 Subject: [PATCH 02/44] Bump down the h3 version. --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 699f74b68..f0759de70 100644 --- a/pom.xml +++ b/pom.xml @@ -93,7 +93,7 @@ h3 - 3.7.3 + 3.7.0 org.locationtech.jts From 4d91d0037ca1f98e27bb0ce2790b4578d5a08bb8 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Tue, 2 Jan 2024 15:11:47 +0000 Subject: [PATCH 03/44] Fix python definition. --- python/mosaic/api/functions.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/python/mosaic/api/functions.py b/python/mosaic/api/functions.py index 7cd7c8b84..47c2e75df 100644 --- a/python/mosaic/api/functions.py +++ b/python/mosaic/api/functions.py @@ -155,7 +155,7 @@ def st_convexhull(geom: ColumnOrName) -> Column: ) -def st_concavehull(geom: ColumnOrName, concavity: ColumnOrName, has_holes: ColumnOrName = lit(False)) -> Column: +def st_concavehull(geom: ColumnOrName, concavity: ColumnOrName, has_holes: Any = False) -> Column: """ Compute the concave hull of a geometry or multi-geometry object. 
It uses lengthRatio and @@ -182,6 +182,10 @@ def st_concavehull(geom: ColumnOrName, concavity: ColumnOrName, has_holes: Colum A polygon """ + + if type(has_holes) == bool: + has_holes = lit(has_holes) + return config.mosaic_context.invoke_function( "st_concavehull", pyspark_to_java_column(geom), From 0a8e56d651639de7341d19be35cc911f3db3fb2a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 23:14:29 +0000 Subject: [PATCH 04/44] Bump org.scoverage:scoverage-maven-plugin from 2.0.0 to 2.0.1 Bumps [org.scoverage:scoverage-maven-plugin](https://github.com/scoverage/scoverage-maven-plugin) from 2.0.0 to 2.0.1. - [Release notes](https://github.com/scoverage/scoverage-maven-plugin/releases) - [Commits](https://github.com/scoverage/scoverage-maven-plugin/compare/scoverage-maven-plugin-2.0.0...scoverage-maven-plugin-2.0.1) --- updated-dependencies: - dependency-name: org.scoverage:scoverage-maven-plugin dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index f0759de70..4649e5edc 100644 --- a/pom.xml +++ b/pom.xml @@ -149,7 +149,7 @@ org.scoverage scoverage-maven-plugin - 2.0.0 + 2.0.1 scoverage-report From 3fb14735095a9a4f229cf4642c686d0b05b5c75a Mon Sep 17 00:00:00 2001 From: Daniel Sparing Date: Thu, 11 Jan 2024 18:31:25 +0100 Subject: [PATCH 05/44] add st_within --- docs/code-example-notebooks/predicates.scala | 33 ++++++ docs/source/api/spatial-predicates.rst | 62 ++++++++++- python/mosaic/api/predicates.py | 21 ++++ .../mosaic/core/geometry/MosaicGeometry.scala | 2 + .../core/geometry/MosaicGeometryJTS.scala | 2 + .../expressions/geometry/ST_Within.scala | 64 +++++++++++ .../labs/mosaic/functions/MosaicContext.scala | 2 + .../geometry/ST_WithinBehaviors.scala | 104 ++++++++++++++++++ .../expressions/geometry/ST_WithinTest.scala | 13 +++ 9 files changed, 302 insertions(+), 1 deletion(-) create mode 100644 src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_Within.scala create mode 100644 src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinBehaviors.scala create mode 100644 src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinTest.scala diff --git a/docs/code-example-notebooks/predicates.scala b/docs/code-example-notebooks/predicates.scala index 9374d4f67..df51a94fb 100644 --- a/docs/code-example-notebooks/predicates.scala +++ b/docs/code-example-notebooks/predicates.scala @@ -87,3 +87,36 @@ df.select(st_intersects($"p1", $"p2")).show(false) // MAGIC %r // MAGIC df <- createDataFrame(data.frame(p1 = "POLYGON ((0 0, 0 3, 3 3, 3 0))", p2 = "POLYGON ((2 2, 2 4, 4 4, 4 2))")) // MAGIC showDF(select(df, st_intersects(column("p1"), column("p2"))), truncate=F) + +// MAGIC %md +// MAGIC ### st_within + +// COMMAND ---------- + +// MAGIC %python +// MAGIC help(st_within) + +// COMMAND ---------- + +// MAGIC %python +// MAGIC df = spark.createDataFrame([{'point': 'POINT (25 15)', 'poly': 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'}]) +// MAGIC df.select(st_within('point', 'poly')).show() + +// COMMAND ---------- + +val df = List(("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")).toDF("point", "poly") +df.select(st_within($"point", $"poly")).show() + +// COMMAND ---------- + +// MAGIC %sql +// MAGIC SELECT st_within("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") + +// COMMAND ---------- + 
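Because st_within is the logical inverse of st_contains (as the documentation change below notes, ST_Contains(a, b) == ST_Within(b, a)), a small pyspark sanity check can be sketched as follows; the DataFrame contents mirror the notebook cells above and the sketch is illustrative, not part of this patch:

    import mosaic as mos

    df = spark.createDataFrame(
        [("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")],
        ["point", "poly"],
    )

    # ST_Contains(a, b) == ST_Within(b, a): both columns should evaluate to true
    df.select(
        mos.st_within("point", "poly"),
        mos.st_contains("poly", "point"),
    ).show()
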
+// MAGIC %r +// MAGIC df <- createDataFrame(data.frame(point = c( "POINT (25 15)"), poly = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")) +// MAGIC showDF(select(df, st_within(column("point"), column("poly")))) + +// COMMAND ---------- + diff --git a/docs/source/api/spatial-predicates.rst b/docs/source/api/spatial-predicates.rst index 1a45dc745..09fc6fa31 100644 --- a/docs/source/api/spatial-predicates.rst +++ b/docs/source/api/spatial-predicates.rst @@ -58,6 +58,8 @@ st_contains | true| +------------------------+ +.. note:: ST_Within is the inverse of ST_Contains, where ST_Contains(a, b)==ST_Within(b,a). + st_intersects ************* @@ -114,4 +116,62 @@ st_intersects | true| +---------------------+ -.. note:: Intersection logic will be dependent on the chosen geometry API (ESRI or JTS). ESRI is only available for mosaic < 0.4.x series, in mosaic >= 0.4.0 JTS is the only geometry API. \ No newline at end of file +.. note:: Intersection logic will be dependent on the chosen geometry API (ESRI or JTS). ESRI is only available for mosaic < 0.4.x series, in mosaic >= 0.4.0 JTS is the only geometry API. + +st_within +********* + +.. function:: st_within(geom1, geom2) + + Returns `true` if `geom1` 'spatially' is within `geom2`. + + :param geom1: Geometry + :type geom1: Column + :param geom2: Geometry + :type geom2: Column + :rtype: Column: BooleanType + + :example: + +.. tabs:: + .. code-tab:: py + + df = spark.createDataFrame([{'point': 'POINT (25 15)', 'poly': 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'}]) + df.select(st_within('point', 'poly')).show() + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + + .. code-tab:: scala + + val df = List(("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")).toDF("point", "poly") + df.select(st_within($"point", $"poly")).show() + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + + .. code-tab:: sql + + SELECT st_within("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + + .. code-tab:: r R + + df <- createDataFrame(data.frame(point = c( "POINT (25 15)"), poly = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")) + showDF(select(df, st_within(column("point"), column("poly")))) + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + +.. note:: ST_Within is the inverse of ST_Contains, where ST_Contains(a, b)==ST_Within(b,a). + diff --git a/python/mosaic/api/predicates.py b/python/mosaic/api/predicates.py index 0b7d01815..39f856597 100644 --- a/python/mosaic/api/predicates.py +++ b/python/mosaic/api/predicates.py @@ -52,3 +52,24 @@ def st_contains(geom1: ColumnOrName, geom2: ColumnOrName) -> Column: pyspark_to_java_column(geom1), pyspark_to_java_column(geom2), ) + + +def st_within(geom1: ColumnOrName, geom2: ColumnOrName) -> Column: + """ + Returns `true` if geom1 'spatially' is within geom2. 
+ + Parameters + ---------- + geom1 : Column + geom2 : Column + + Returns + ------- + Column (BooleanType) + + """ + return config.mosaic_context.invoke_function( + "st_within", + pyspark_to_java_column(geom1), + pyspark_to_java_column(geom2), + ) diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala index 8af8c9996..2063ca7c7 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala @@ -83,6 +83,8 @@ trait MosaicGeometry extends GeometryWriter with Serializable { def contains(other: MosaicGeometry): Boolean + def within(other: MosaicGeometry): Boolean + def flatten: Seq[MosaicGeometry] def equals(other: MosaicGeometry): Boolean diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala index 17960d423..25e723510 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala @@ -148,6 +148,8 @@ abstract class MosaicGeometryJTS(geom: Geometry) extends MosaicGeometry { override def contains(geom2: MosaicGeometry): Boolean = geom.contains(geom2.asInstanceOf[MosaicGeometryJTS].getGeom) + override def within(geom2: MosaicGeometry): Boolean = geom.within(geom2.asInstanceOf[MosaicGeometryJTS].getGeom) + def getGeom: Geometry = geom override def isValid: Boolean = geom.isValid diff --git a/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_Within.scala b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_Within.scala new file mode 100644 index 000000000..c5cfd5a8f --- /dev/null +++ b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_Within.scala @@ -0,0 +1,64 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.MosaicGeometry +import com.databricks.labs.mosaic.expressions.base.{GenericExpressionFactory, WithExpressionInfo} +import com.databricks.labs.mosaic.expressions.geometry.base.BinaryVectorExpression +import com.databricks.labs.mosaic.functions.MosaicExpressionConfig +import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder +import org.apache.spark.sql.catalyst.expressions.Expression +import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext +import org.apache.spark.sql.types.{BooleanType, DataType} + +/** + * Returns true if leftGeom is within rightGeom. + * @param leftGeom + * The left geometry. + * @param rightGeom + * The right geometry. + * @param expressionConfig + * Additional arguments for the expression (expressionConfigs). 
+ */ +case class ST_Within( + leftGeom: Expression, + rightGeom: Expression, + expressionConfig: MosaicExpressionConfig +) extends BinaryVectorExpression[ST_Within]( + leftGeom, + rightGeom, + returnsGeometry = false, + expressionConfig + ) { + + override def dataType: DataType = BooleanType + + override def geometryTransform(leftGeometry: MosaicGeometry, rightGeometry: MosaicGeometry): Any = { + leftGeometry.within(rightGeometry) + } + + override def geometryCodeGen(leftGeometryRef: String, rightGeometryRef: String, ctx: CodegenContext): (String, String) = { + val within = ctx.freshName("within") + val code = s"""boolean $within = $leftGeometryRef.within($rightGeometryRef);""" + (code, within) + } + +} + +/** Expression info required for the expression registration for spark SQL. */ +object ST_Within extends WithExpressionInfo { + + override def name: String = "st_within" + + override def usage: String = "_FUNC_(expr1, expr2) - Returns true if expr1 is within expr2." + + override def example: String = + """ + | Examples: + | > SELECT _FUNC_(A, B); + | true + | """.stripMargin + + override def builder(expressionConfig: MosaicExpressionConfig): FunctionBuilder = { + GenericExpressionFactory.getBaseBuilder[ST_Within](2, expressionConfig) + } + +} diff --git a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala index fbb0bb922..2cdfeb3a5 100644 --- a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala +++ b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala @@ -177,6 +177,7 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends mosaicRegistry.registerExpression[ST_UnaryUnion](expressionConfig) mosaicRegistry.registerExpression[ST_Union](expressionConfig) mosaicRegistry.registerExpression[ST_UpdateSRID](expressionConfig) + mosaicRegistry.registerExpression[ST_Within](expressionConfig) mosaicRegistry.registerExpression[ST_X](expressionConfig) mosaicRegistry.registerExpression[ST_Y](expressionConfig) mosaicRegistry.registerExpression[ST_Haversine](expressionConfig) @@ -630,6 +631,7 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends /** Spatial predicates */ def st_contains(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Contains(geom1.expr, geom2.expr, expressionConfig)) def st_intersects(left: Column, right: Column): Column = ColumnAdapter(ST_Intersects(left.expr, right.expr, expressionConfig)) + def st_within(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Within(geom1.expr, geom2.expr, expressionConfig)) /** RasterAPI dependent functions */ def rst_bandmetadata(raster: Column, band: Column): Column = diff --git a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinBehaviors.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinBehaviors.scala new file mode 100644 index 000000000..fba4805b5 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinBehaviors.scala @@ -0,0 +1,104 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.functions.MosaicContext +import com.databricks.labs.mosaic.test.MosaicSpatialQueryTest +import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator} +import org.apache.spark.sql.execution.WholeStageCodegenExec +import org.apache.spark.sql.functions.lit +import org.apache.spark.sql.types._ +import 
org.scalatest.matchers.must.Matchers.noException +import org.scalatest.matchers.should.Matchers.{an, be, convertToAnyShouldWrapper} + +trait ST_WithinBehaviors extends MosaicSpatialQueryTest { + + def withinBehavior(mosaicContext: MosaicContext): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = mosaicContext + import mc.functions._ + val sc = spark + import sc.implicits._ + mc.register(spark) + + val poly = """POLYGON ((10 10, 110 10, 110 110, 10 110, 10 10), + | (20 20, 20 30, 30 30, 30 20, 20 20), + | (40 20, 40 30, 50 30, 50 20, 40 20))""".stripMargin.filter(_ >= ' ') + + val rows = List( + ("POINT (35 25)", poly, true), + ("POINT (25 25)", poly, false) + ) + + val results = rows + .toDF("leftGeom", "rightGeom", "expected") + .withColumn("result", st_within($"leftGeom", $"rightGeom")) + .where($"expected" === $"result") + + results.count shouldBe 2 + } + + def withinCodegen(mosaicContext: MosaicContext): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = mosaicContext + val sc = spark + import mc.functions._ + import sc.implicits._ + mc.register(spark) + + val poly = """POLYGON ((10 10, 110 10, 110 110, 10 110, 10 10), + | (20 20, 20 30, 30 30, 30 20, 20 20), + | (40 20, 40 30, 50 30, 50 20, 40 20))""".stripMargin.filter(_ >= ' ') + + val rows = List( + ("POINT (35 25)", true), + ("POINT (25 25)", false) + ) + + val polygons = List(poly).toDF("rightGeom") + val points = rows.toDF("leftGeom", "expected") + + val result = polygons + .crossJoin(points) + .withColumn("result", st_within($"leftGeom", $"rightGeom")) + .where($"expected" === $"result") + + val queryExecution = result.queryExecution + val plan = queryExecution.executedPlan + + val wholeStageCodegenExec = plan.find(_.isInstanceOf[WholeStageCodegenExec]) + + wholeStageCodegenExec.isDefined shouldBe true + + val codeGenStage = wholeStageCodegenExec.get.asInstanceOf[WholeStageCodegenExec] + val (_, code) = codeGenStage.doCodeGen() + + noException should be thrownBy CodeGenerator.compile(code) + + val stWithin = ST_Within(lit(rows.head._1).expr, lit(1).expr, mc.expressionConfig) + val ctx = new CodegenContext + an[Error] should be thrownBy stWithin.genCode(ctx) + } + + def auxiliaryMethods(mosaicContext: MosaicContext): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = mosaicContext + mc.register(spark) + + val poly = """POLYGON ((10 10, 110 10, 110 110, 10 110, 10 10), + | (20 20, 20 30, 30 30, 30 20, 20 20), + | (40 20, 40 30, 50 30, 50 20, 40 20))""".stripMargin.filter(_ >= ' ') + + val rows = List( + ("POINT (35 25)", true), + ("POINT (25 25)", false) + ) + + val stWithin = ST_Within(lit(rows.head._1).expr, lit(poly).expr, mc.expressionConfig) + + stWithin.left shouldEqual lit(rows.head._1).expr + stWithin.right shouldEqual lit(poly).expr + stWithin.dataType shouldEqual BooleanType + noException should be thrownBy stWithin.makeCopy(Array(stWithin.left, stWithin.right)) + + } + +} diff --git a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinTest.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinTest.scala new file mode 100644 index 000000000..963843c27 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinTest.scala @@ -0,0 +1,13 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.test.MosaicSpatialQueryTest +import org.apache.spark.sql.test.SharedSparkSession + +class ST_WithinTest extends MosaicSpatialQueryTest with SharedSparkSession with ST_WithinBehaviors 
{ + + testAllGeometriesNoCodegen("ST_Within behavior") { withinBehavior } + testAllGeometriesCodegen("ST_Within codegen compilation") { withinCodegen } + testAllGeometriesCodegen("ST_Within codegen behavior") { withinBehavior } + testAllGeometriesNoCodegen("ST_Within auxiliary methods") { auxiliaryMethods } + +} From e95fd9e3655457dac42ad19151da5fee9e509ba8 Mon Sep 17 00:00:00 2001 From: Daniel Sparing Date: Thu, 11 Jan 2024 23:23:40 +0100 Subject: [PATCH 06/44] expect closed linestring for polygon --- .../mosaic/core/geometry/polygon/MosaicPolygonJTS.scala | 6 +++--- .../mosaic/core/geometry/polygon/TestPolygonJTS.scala | 4 ++-- .../labs/mosaic/core/index/TestCustomIndexSystem.scala | 8 ++++---- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala index 6487f2f66..1e923099a 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala @@ -84,14 +84,14 @@ object MosaicPolygonJTS extends GeometryReader { val newGeom = GeometryTypeEnum.fromString(geomSeq.head.getGeometryType) match { case POINT => val extractedPoints = geomSeq.map(_.asInstanceOf[MosaicPointJTS]) - val exteriorRing = extractedPoints.map(_.coord).toArray ++ Array(extractedPoints.head.coord) + val exteriorRing = extractedPoints.map(_.coord).toArray gf.createPolygon(exteriorRing) case LINESTRING => val extractedLines = geomSeq.map(_.asInstanceOf[MosaicLineStringJTS]) val exteriorRing = - gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray ++ Array(extractedLines.head.asSeq.head.coord)) + gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray) val holes = extractedLines.tail - .map({ h: MosaicLineStringJTS => h.asSeq.map(_.coord).toArray ++ Array(h.asSeq.head.coord) }) + .map({ h: MosaicLineStringJTS => h.asSeq.map(_.coord).toArray}) .map(gf.createLinearRing) .toArray gf.createPolygon(exteriorRing, holes) diff --git a/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala b/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala index 498e2f84f..167007434 100644 --- a/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala +++ b/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala @@ -42,7 +42,7 @@ class TestPolygonJTS extends AnyFlatSpec { "MosaicPolygonJTS" should "be instantiable from a Seq of MosaicPointJTS" in { val polygonReference = MosaicPolygonJTS.fromWKT("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") - val pointSeq = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)") + val pointSeq = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)", "POINT (30 10)") .map(MosaicPointJTS.fromWKT) .map(_.asInstanceOf[MosaicPointJTS]) val polygonTest = MosaicPolygonJTS.fromSeq(pointSeq) @@ -59,7 +59,7 @@ class TestPolygonJTS extends AnyFlatSpec { "MosaicPolygonJTS" should "be instantiable from a Seq of MosaicLineStringJTS" in { val polygonReference = MosaicPolygonJTS.fromWKT("POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))") - val linesSeq = Seq("LINESTRING (35 10, 45 45, 15 40, 10 20)", "LINESTRING (20 30, 35 35, 30 20)") + val linesSeq = Seq("LINESTRING (35 10, 45 45, 15 40, 10 20, 35 10)", "LINESTRING (20 30, 35 
35, 30 20, 20 30)") .map(MosaicLineStringJTS.fromWKT) .map(_.asInstanceOf[MosaicLineStringJTS]) val polygonTest = MosaicPolygonJTS.fromSeq(linesSeq) diff --git a/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala b/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala index b20a19c91..022de18a4 100644 --- a/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala +++ b/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala @@ -118,16 +118,16 @@ class TestCustomIndexSystem extends AnyFunSuite { // First quadrant val wkt0 = grid.indexToGeometry(0 | resolutionMask, JTS).toWKT - wkt0 shouldBe "POLYGON ((0 0, 50 0, 50 50, 0 50, 0 0, 0 0))" + wkt0 shouldBe "POLYGON ((0 0, 50 0, 50 50, 0 50, 0 0))" val wkt1 = grid.indexToGeometry(1 | resolutionMask, JTS).toWKT - wkt1 shouldBe "POLYGON ((50 0, 100 0, 100 50, 50 50, 50 0, 50 0))" + wkt1 shouldBe "POLYGON ((50 0, 100 0, 100 50, 50 50, 50 0))" val wkt2 = grid.indexToGeometry(2 | resolutionMask, JTS).toWKT - wkt2 shouldBe "POLYGON ((0 50, 50 50, 50 100, 0 100, 0 50, 0 50))" + wkt2 shouldBe "POLYGON ((0 50, 50 50, 50 100, 0 100, 0 50))" val wkt3 = grid.indexToGeometry(3 | resolutionMask, JTS).toWKT - wkt3 shouldBe "POLYGON ((50 50, 100 50, 100 100, 50 100, 50 50, 50 50))" + wkt3 shouldBe "POLYGON ((50 50, 100 50, 100 100, 50 100, 50 50))" } test("polyfill single cell") { From 5956cb2ff91d7f5f1ab16ce9e6bf0b0bb1751cf9 Mon Sep 17 00:00:00 2001 From: Daniel Sparing Date: Thu, 11 Jan 2024 23:56:47 +0100 Subject: [PATCH 07/44] make appends conditional --- .../geometry/polygon/MosaicPolygonJTS.scala | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala index 1e923099a..94e4e4982 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala @@ -84,14 +84,26 @@ object MosaicPolygonJTS extends GeometryReader { val newGeom = GeometryTypeEnum.fromString(geomSeq.head.getGeometryType) match { case POINT => val extractedPoints = geomSeq.map(_.asInstanceOf[MosaicPointJTS]) - val exteriorRing = extractedPoints.map(_.coord).toArray + val exteriorRing = + if (extractedPoints.head.coord == extractedPoints.last.coord) { + extractedPoints.map(_.coord).toArray + } else { + extractedPoints.map(_.coord).toArray ++ Array(extractedPoints.head.coord) + } gf.createPolygon(exteriorRing) case LINESTRING => val extractedLines = geomSeq.map(_.asInstanceOf[MosaicLineStringJTS]) val exteriorRing = - gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray) + if (extractedLines.head.asSeq.head.coord == extractedLines.head.asSeq.last.coord) { + gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray) + } else { + gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray ++ Array(extractedLines.head.asSeq.head.coord)) + } val holes = extractedLines.tail - .map({ h: MosaicLineStringJTS => h.asSeq.map(_.coord).toArray}) + .map({ h: MosaicLineStringJTS => + if (h.asSeq.head.coord == h.asSeq.last.coord) h.asSeq.map(_.coord).toArray + else h.asSeq.map(_.coord).toArray ++ Array(h.asSeq.head.coord) + }) .map(gf.createLinearRing) .toArray gf.createPolygon(exteriorRing, holes) From 991199b073253ec92eee371e3bdd5bf8cb780da1 Mon Sep 
17 00:00:00 2001 From: Daniel Sparing Date: Fri, 12 Jan 2024 00:06:21 +0100 Subject: [PATCH 08/44] add brackets for multiline if for readability --- .../mosaic/core/geometry/polygon/MosaicPolygonJTS.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala index 94e4e4982..33605eaa8 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala @@ -101,8 +101,11 @@ object MosaicPolygonJTS extends GeometryReader { } val holes = extractedLines.tail .map({ h: MosaicLineStringJTS => - if (h.asSeq.head.coord == h.asSeq.last.coord) h.asSeq.map(_.coord).toArray - else h.asSeq.map(_.coord).toArray ++ Array(h.asSeq.head.coord) + if (h.asSeq.head.coord == h.asSeq.last.coord) { + h.asSeq.map(_.coord).toArray + } else { + h.asSeq.map(_.coord).toArray ++ Array(h.asSeq.head.coord) + } }) .map(gf.createLinearRing) .toArray From f9e3fa03e42bcd8392657ce0a8479fed2671e138 Mon Sep 17 00:00:00 2001 From: Daniel Sparing Date: Fri, 12 Jan 2024 00:27:07 +0100 Subject: [PATCH 09/44] add tests for coverage --- .../geometry/polygon/TestPolygonJTS.scala | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala b/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala index 167007434..f63fbc9e2 100644 --- a/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala +++ b/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala @@ -42,11 +42,16 @@ class TestPolygonJTS extends AnyFlatSpec { "MosaicPolygonJTS" should "be instantiable from a Seq of MosaicPointJTS" in { val polygonReference = MosaicPolygonJTS.fromWKT("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") - val pointSeq = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)", "POINT (30 10)") + val pointSeq_open = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)") .map(MosaicPointJTS.fromWKT) .map(_.asInstanceOf[MosaicPointJTS]) - val polygonTest = MosaicPolygonJTS.fromSeq(pointSeq) - polygonReference.equals(polygonTest) shouldBe true + val pointSeq_closed = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)", "POINT (30 10)") + .map(MosaicPointJTS.fromWKT) + .map(_.asInstanceOf[MosaicPointJTS]) + val polygonTest_open = MosaicPolygonJTS.fromSeq(pointSeq_open) + val polygonTest_closed = MosaicPolygonJTS.fromSeq(pointSeq_closed) + polygonReference.equals(polygonTest_open) shouldBe true + polygonReference.equals(polygonTest_closed) shouldBe true } "MosaicPolygonJTS" should "not fail for empty Seq" in { @@ -59,11 +64,16 @@ class TestPolygonJTS extends AnyFlatSpec { "MosaicPolygonJTS" should "be instantiable from a Seq of MosaicLineStringJTS" in { val polygonReference = MosaicPolygonJTS.fromWKT("POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))") - val linesSeq = Seq("LINESTRING (35 10, 45 45, 15 40, 10 20, 35 10)", "LINESTRING (20 30, 35 35, 30 20, 20 30)") + val linesSeq_open = Seq("LINESTRING (35 10, 45 45, 15 40, 10 20)", "LINESTRING (20 30, 35 35, 30 20)") + .map(MosaicLineStringJTS.fromWKT) + .map(_.asInstanceOf[MosaicLineStringJTS]) + val linesSeq_closed = 
Seq("LINESTRING (35 10, 45 45, 15 40, 10 20, 35 10)", "LINESTRING (20 30, 35 35, 30 20, 20 30)") .map(MosaicLineStringJTS.fromWKT) .map(_.asInstanceOf[MosaicLineStringJTS]) - val polygonTest = MosaicPolygonJTS.fromSeq(linesSeq) - polygonReference.equals(polygonTest) shouldBe true + val polygonTest_open = MosaicPolygonJTS.fromSeq(linesSeq_open) + val polygonTest_closed = MosaicPolygonJTS.fromSeq(linesSeq_closed) + polygonReference.equals(polygonTest_open) shouldBe true + polygonReference.equals(polygonTest_closed) shouldBe true } "MosaicPolygonJTS" should "return a Seq of MosaicLineStringJTS object when calling asSeq" in { From 4b4ec85c4d88a895b31b56e4a3577da5a2290c98 Mon Sep 17 00:00:00 2001 From: Daniel Sparing Date: Sat, 13 Jan 2024 14:12:19 +0100 Subject: [PATCH 10/44] fix docstring so that edgeratio=0 is not boundingbox --- python/mosaic/api/functions.py | 14 +++++++------- .../expressions/geometry/ST_ConcaveHull.scala | 14 +++++++------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/python/mosaic/api/functions.py b/python/mosaic/api/functions.py index 47c2e75df..9819caa5f 100644 --- a/python/mosaic/api/functions.py +++ b/python/mosaic/api/functions.py @@ -159,13 +159,13 @@ def st_concavehull(geom: ColumnOrName, concavity: ColumnOrName, has_holes: Any = """ Compute the concave hull of a geometry or multi-geometry object. It uses lengthRatio and - allowHoles to determine the concave hull. lengthRatio is the ratio of the - length of the concave hull to the length of the convex hull. If set to 1, - this is the same as the convex hull. If set to 0, this is the same as the - bounding box. AllowHoles is a boolean that determines whether the concave - hull can have holes. If set to true, the concave hull can have holes. If set - to false, the concave hull will not have holes. (For PostGIS, the default is - false.) + allowHoles to determine the concave hull. lengthRatio is the fraction of the + difference between the longest and shortest edge lengths in the Delaunay + Triangulation. If set to 1, this is the same as the convex hull. If set to + 0, it produces produces maximum concaveness. AllowHoles is a boolean that + determines whether the concave hull can have holes. If set to true, the + concave hull can have holes. If set to false, the concave hull will not have + holes. (For PostGIS, the default is false.) Parameters ---------- diff --git a/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala index 6bf602a5b..0a4cc88d5 100644 --- a/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala +++ b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala @@ -12,13 +12,13 @@ import org.apache.spark.sql.types.DataType /** * Returns the concave hull for a given geometry. It uses lengthRatio and - * allowHoles to determine the concave hull. lengthRatio is the ratio of the - * length of the concave hull to the length of the convex hull. If set to 1, - * this is the same as the convex hull. If set to 0, this is the same as the - * bounding box. AllowHoles is a boolean that determines whether the concave - * hull can have holes. If set to true, the concave hull can have holes. If set - * to false, the concave hull will not have holes. (For PostGIS, the default is - * false.) + * allowHoles to determine the concave hull. 
lengthRatio is the fraction of the + * difference between the longest and shortest edge lengths in the Delaunay + * Triangulation. If set to 1, this is the same as the convex hull. If set to + * 0, it produces produces maximum concaveness. AllowHoles is a boolean that + * determines whether the concave hull can have holes. If set to true, the + * concave hull can have holes. If set to false, the concave hull will not have + * holes. (For PostGIS, the default is false.) * @param inputGeom * The input geometry. * @param expressionConfig From 54295327e3315a6fdb8fa75aa5466264a92a9aad Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 13:44:16 +0000 Subject: [PATCH 11/44] Bump version number. --- R/sparkR-mosaic/sparkrMosaic/DESCRIPTION | 2 +- R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION | 2 +- R/sparklyr-mosaic/tests.R | 2 +- docs/source/conf.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION b/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION index 351d48fe9..7bc9e3f62 100644 --- a/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION +++ b/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION @@ -1,6 +1,6 @@ Package: sparkrMosaic Title: SparkR bindings for Databricks Mosaic -Version: 0.3.14 +Version: 0.4.0 Authors@R: person("Robert", "Whiffin", , "robert.whiffin@databricks.com", role = c("aut", "cre") ) diff --git a/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION b/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION index dc3fd0904..4dbd7b03d 100644 --- a/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION +++ b/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION @@ -1,6 +1,6 @@ Package: sparklyrMosaic Title: sparklyr bindings for Databricks Mosaic -Version: 0.3.14 +Version: 0.4.0 Authors@R: person("Robert", "Whiffin", , "robert.whiffin@databricks.com", role = c("aut", "cre") ) diff --git a/R/sparklyr-mosaic/tests.R b/R/sparklyr-mosaic/tests.R index 18d441864..17bdd882a 100644 --- a/R/sparklyr-mosaic/tests.R +++ b/R/sparklyr-mosaic/tests.R @@ -11,7 +11,7 @@ library(sparklyr) spark_home <- Sys.getenv("SPARK_HOME") spark_home_set(spark_home) -install.packages("sparklyrMosaic_0.3.14.tar.gz", repos = NULL) +install.packages("sparklyrMosaic_0.4.0.tar.gz", repos = NULL) library(sparklyrMosaic) # find the mosaic jar in staging diff --git a/docs/source/conf.py b/docs/source/conf.py index c2772c7a0..b8e4b5c3d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -22,7 +22,7 @@ author = 'Stuart Lynn, Milos Colic, Erni Durdevic, Robert Whiffin, Timo Roest' # The full version, including alpha/beta/rc tags -release = "v0.3.14" +release = "v0.4.0" # -- General configuration --------------------------------------------------- From a100496dbbafd68116093f92e4518ab049ff2176 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 13:45:53 +0000 Subject: [PATCH 12/44] Re-enable R builds. 
--- .github/workflows/build_main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_main.yml b/.github/workflows/build_main.yml index 4f0c676cd..f1088a5b6 100644 --- a/.github/workflows/build_main.yml +++ b/.github/workflows/build_main.yml @@ -28,7 +28,7 @@ jobs: uses: ./.github/actions/scala_build - name: build python uses: ./.github/actions/python_build - # - name: build R - # uses: ./.github/actions/r_build + - name: build R + uses: ./.github/actions/r_build - name: upload artefacts uses: ./.github/actions/upload_artefacts From df84708ed7567e0d69ca01e526cebf503585aa31 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 14:09:50 +0000 Subject: [PATCH 13/44] Update sphinx --- docs/docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt index a448f7f7d..21fbfcae0 100644 --- a/docs/docs-requirements.txt +++ b/docs/docs-requirements.txt @@ -1,5 +1,5 @@ setuptools==68.1.2 -Sphinx==4.4.0 +Sphinx==6.1.0 sphinx-material==0.0.35 nbsphinx==0.8.8 ipython>=8.10.1 From 444d68ad0d2e71b7c57ba3545a36435eb2b97369 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 14:15:54 +0000 Subject: [PATCH 14/44] Update sphinx --- docs/docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt index 21fbfcae0..81f6f010f 100644 --- a/docs/docs-requirements.txt +++ b/docs/docs-requirements.txt @@ -1,6 +1,6 @@ setuptools==68.1.2 Sphinx==6.1.0 -sphinx-material==0.0.35 +sphinx-material==0.0.36 nbsphinx==0.8.8 ipython>=8.10.1 sphinxcontrib-fulltoc==1.2.0 From ca81c0564eb2647719502d8662cd91a230766b1c Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 14:20:43 +0000 Subject: [PATCH 15/44] Update sphinx --- docs/docs-requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt index 81f6f010f..f5de2239b 100644 --- a/docs/docs-requirements.txt +++ b/docs/docs-requirements.txt @@ -5,6 +5,5 @@ nbsphinx==0.8.8 ipython>=8.10.1 sphinxcontrib-fulltoc==1.2.0 livereload==2.6.3 -autodocsumm==0.2.7 sphinx-tabs==3.2.0 renku-sphinx-theme==0.2.3 \ No newline at end of file From b4606a13226dc41081e892f5239a62d71b083202 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 14:26:02 +0000 Subject: [PATCH 16/44] Update sphinx --- docs/docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt index f5de2239b..8be84653e 100644 --- a/docs/docs-requirements.txt +++ b/docs/docs-requirements.txt @@ -5,5 +5,5 @@ nbsphinx==0.8.8 ipython>=8.10.1 sphinxcontrib-fulltoc==1.2.0 livereload==2.6.3 -sphinx-tabs==3.2.0 +sphinx-tabs==3.4.4 renku-sphinx-theme==0.2.3 \ No newline at end of file From bdafaa5a4e3bd4a5351e60dffd3656d4eef2a678 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 14:29:18 +0000 Subject: [PATCH 17/44] Update sphinx --- docs/docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt index 8be84653e..1fd60955e 100644 --- a/docs/docs-requirements.txt +++ b/docs/docs-requirements.txt @@ -6,4 +6,4 @@ ipython>=8.10.1 sphinxcontrib-fulltoc==1.2.0 livereload==2.6.3 sphinx-tabs==3.4.4 -renku-sphinx-theme==0.2.3 \ No newline at end of file +renku-sphinx-theme==0.3.0 \ No newline at end of file From 
0ab664694e459a005ef347b6f4245f3698701d97 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 14:50:54 +0000 Subject: [PATCH 18/44] Update sphinx --- docs/docs-requirements.txt | 4 +- docs/source/api/raster-format-readers.rst | 67 +++++++++--------- docs/source/api/vector-format-readers.rst | 85 ++++++++++------------- 3 files changed, 73 insertions(+), 83 deletions(-) diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt index 1fd60955e..969601087 100644 --- a/docs/docs-requirements.txt +++ b/docs/docs-requirements.txt @@ -1,7 +1,7 @@ setuptools==68.1.2 -Sphinx==6.1.0 +Sphinx==6.1.3 sphinx-material==0.0.36 -nbsphinx==0.8.8 +nbsphinx>=0.8.8 ipython>=8.10.1 sphinxcontrib-fulltoc==1.2.0 livereload==2.6.3 diff --git a/docs/source/api/raster-format-readers.rst b/docs/source/api/raster-format-readers.rst index dabcc821e..d41277fd7 100644 --- a/docs/source/api/raster-format-readers.rst +++ b/docs/source/api/raster-format-readers.rst @@ -4,22 +4,23 @@ Raster Format Readers Intro -################ +##### Mosaic provides spark readers for the following raster formats: - * GTiff (GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/gtiff.html - * COG (Cloud Optimized GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/cog.html - * HDF4 using .hdf file extension - https://gdal.org/drivers/raster/hdf4.html - * HDF5 using .h5 file extension - https://gdal.org/drivers/raster/hdf5.html - * NetCDF using .nc file extension - https://gdal.org/drivers/raster/netcdf.html - * JP2ECW using .jp2 file extension - https://gdal.org/drivers/raster/jp2ecw.html - * JP2KAK using .jp2 file extension - https://gdal.org/drivers/raster/jp2kak.html - * JP2OpenJPEG using .jp2 file extension - https://gdal.org/drivers/raster/jp2openjpeg.html - * PDF using .pdf file extension - https://gdal.org/drivers/raster/pdf.html - * PNG using .png file extension - https://gdal.org/drivers/raster/png.html - * VRT using .vrt file extension - https://gdal.org/drivers/raster/vrt.html - * XPM using .xpm file extension - https://gdal.org/drivers/raster/xpm.html - * GRIB using .grb file extension - https://gdal.org/drivers/raster/grib.html - * Zarr using .zarr file extension - https://gdal.org/drivers/raster/zarr.html +* GTiff (GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/gtiff.html +* COG (Cloud Optimized GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/cog.html +* HDF4 using .hdf file extension - https://gdal.org/drivers/raster/hdf4.html +* HDF5 using .h5 file extension - https://gdal.org/drivers/raster/hdf5.html +* NetCDF using .nc file extension - https://gdal.org/drivers/raster/netcdf.html +* JP2ECW using .jp2 file extension - https://gdal.org/drivers/raster/jp2ecw.html +* JP2KAK using .jp2 file extension - https://gdal.org/drivers/raster/jp2kak.html +* JP2OpenJPEG using .jp2 file extension - https://gdal.org/drivers/raster/jp2openjpeg.html +* PDF using .pdf file extension - https://gdal.org/drivers/raster/pdf.html +* PNG using .png file extension - https://gdal.org/drivers/raster/png.html +* VRT using .vrt file extension - https://gdal.org/drivers/raster/vrt.html +* XPM using .xpm file extension - https://gdal.org/drivers/raster/xpm.html +* GRIB using .grb file extension - https://gdal.org/drivers/raster/grib.html +* Zarr using .zarr file extension - https://gdal.org/drivers/raster/zarr.html + Other formats are supported if supported by GDAL available drivers. 
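As a quick orientation, both reader flavors described next can be invoked as in the following hypothetical sketch; the paths and option values are illustrative, and `enable_gdal` is assumed to be the prerequisite for the GDAL-backed readers:

.. code-block:: python

    import mosaic as mos

    mos.enable_mosaic(spark, dbutils)
    mos.enable_gdal(spark)  # assumed prerequisite for GDAL-backed readers

    # Base reader: one raster file per task, tile plus metadata columns.
    df = spark.read.format("gdal").load("dbfs:/path/to/rasters/")

    # Grid reader: combines pixel values per grid cell at a chosen resolution.
    grid_df = (
        mos.read()
        .format("raster_to_grid")
        .option("resolution", "8")
        .option("combiner", "mean")
        .load("dbfs:/path/to/rasters/")
    )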
Mosaic provides two flavors of the readers: @@ -32,14 +33,14 @@ spark.read.format("gdal") A base Spark SQL data source for reading GDAL raster data sources. It reads metadata of the raster and exposes the direct paths for the raster files. The output of the reader is a DataFrame with the following columns: - * tile - loaded raster tile (RasterTileType) - * ySize - height of the raster in pixels (IntegerType) - * xSize - width of the raster in pixels (IntegerType) - * bandCount - number of bands in the raster (IntegerType) - * metadata - raster metadata (MapType(StringType, StringType)) - * subdatasets - raster subdatasets (MapType(StringType, StringType)) - * srid - raster spatial reference system identifier (IntegerType) - * proj4Str - raster spatial reference system proj4 string (StringType) +* tile - loaded raster tile (RasterTileType) +* ySize - height of the raster in pixels (IntegerType) +* xSize - width of the raster in pixels (IntegerType) +* bandCount - number of bands in the raster (IntegerType) +* metadata - raster metadata (MapType(StringType, StringType)) +* subdatasets - raster subdatasets (MapType(StringType, StringType)) +* srid - raster spatial reference system identifier (IntegerType) +* proj4Str - raster spatial reference system proj4 string (StringType) .. function:: spark.read.format("gdal").load(path) @@ -94,16 +95,16 @@ If the raster pixels are larger than the grid cells, the cell values can be calc The interpolation method used is Inverse Distance Weighting (IDW) where the distance function is a k_ring distance of the grid. The reader supports the following options: - * fileExtension - file extension of the raster file (StringType) - default is *.* - * vsizip - if the rasters are zipped files, set this to true (BooleanType) - * resolution - resolution of the output grid (IntegerType) - * combiner - combiner operation to use when converting raster to grid (StringType) - default is mean - * retile - if the rasters are too large they can be re-tiled to smaller tiles (BooleanType) - * tileSize - size of the re-tiled tiles, tiles are always squares of tileSize x tileSize (IntegerType) - * readSubdatasets - if the raster has subdatasets set this to true (BooleanType) - * subdatasetNumber - if the raster has subdatasets, select a specific subdataset by index (IntegerType) - * subdatasetName - if the raster has subdatasets, select a specific subdataset by name (StringType) - * kRingInterpolate - if the raster pixels are larger than the grid cells, use k_ring interpolation with n = kRingInterpolate (IntegerType) +* fileExtension - file extension of the raster file (StringType) - default is *.* +* vsizip - if the rasters are zipped files, set this to true (BooleanType) +* resolution - resolution of the output grid (IntegerType) +* combiner - combiner operation to use when converting raster to grid (StringType) - default is mean +* retile - if the rasters are too large they can be re-tiled to smaller tiles (BooleanType) +* tileSize - size of the re-tiled tiles, tiles are always squares of tileSize x tileSize (IntegerType) +* readSubdatasets - if the raster has subdatasets set this to true (BooleanType) +* subdatasetNumber - if the raster has subdatasets, select a specific subdataset by index (IntegerType) +* subdatasetName - if the raster has subdatasets, select a specific subdataset by name (StringType) +* kRingInterpolate - if the raster pixels are larger than the grid cells, use k_ring interpolation with n = kRingInterpolate (IntegerType) .. 
function:: mos.read().format("raster_to_grid").load(path) diff --git a/docs/source/api/vector-format-readers.rst b/docs/source/api/vector-format-readers.rst index 8825803d5..f47c86bb5 100644 --- a/docs/source/api/vector-format-readers.rst +++ b/docs/source/api/vector-format-readers.rst @@ -8,36 +8,25 @@ Intro Mosaic provides spark readers for vector files supported by GDAL OGR drivers. Only the drivers that are built by default are supported. Here are some common useful file formats: - * GeoJSON (also ESRIJSON, TopoJSON) - https://gdal.org/drivers/vector/geojson.html - * ESRI File Geodatabase (FileGDB) and ESRI File Geodatabase vector (OpenFileGDB) - Mosaic implements named reader geo_db (described in this doc) - https://gdal.org/drivers/vector/filegdb.html - * ESRI Shapefile / DBF (ESRI Shapefile) - Mosaic implements named reader shapefile (described in this doc) - https://gdal.org/drivers/vector/shapefile.html - * Network Common Data Form (netCDF) - Mosaic implements raster reader also - https://gdal.org/drivers/raster/netcdf.html - * (Geo)Parquet (Parquet) - Mosaic will be implementing a custom reader soon - https://gdal.org/drivers/vector/parquet.html - * Spreadsheets (XLSX, XLS, ODS) - https://gdal.org/drivers/vector/xls.html - * U.S. Census TIGER/Line (TIGER) - https://gdal.org/drivers/vector/tiger.html - * PostgreSQL Dump (PGDump) - https://gdal.org/drivers/vector/pgdump.html - * Keyhole Markup Language (KML) - https://gdal.org/drivers/vector/kml.html - * Geography Markup Language (GML) - https://gdal.org/drivers/vector/gml.html - * GRASS - option for Linear Referencing Systems (LRS) - https://gdal.org/drivers/vector/grass.html +* GeoJSON (also ESRIJSON, TopoJSON) https://gdal.org/drivers/vector/geojson.html +* ESRI File Geodatabase (FileGDB) and ESRI File Geodatabase vector (OpenFileGDB). Mosaic implements named reader geo_db (described in this doc). https://gdal.org/drivers/vector/filegdb.html +* ESRI Shapefile / DBF (ESRI Shapefile) - Mosaic implements named reader shapefile (described in this doc) https://gdal.org/drivers/vector/shapefile.html +* Network Common Data Form (netCDF) - Mosaic implements raster reader also https://gdal.org/drivers/raster/netcdf.html +* (Geo)Parquet (Parquet) - Mosaic will be implementing a custom reader soon https://gdal.org/drivers/vector/parquet.html +* Spreadsheets (XLSX, XLS, ODS) https://gdal.org/drivers/vector/xls.html +* U.S. Census TIGER/Line (TIGER) https://gdal.org/drivers/vector/tiger.html +* PostgreSQL Dump (PGDump) https://gdal.org/drivers/vector/pgdump.html +* Keyhole Markup Language (KML) https://gdal.org/drivers/vector/kml.html +* Geography Markup Language (GML) https://gdal.org/drivers/vector/gml.html +* GRASS - option for Linear Referencing Systems (LRS) https://gdal.org/drivers/vector/grass.html + For more information please refer to gdal documentation: https://gdal.org/drivers/vector/index.html Mosaic provides two flavors of the readers: - * spark.read.format("ogr") for reading 1 file per spark task - * mos.read().format("multi_read_ogr") for reading file in parallel with multiple spark tasks +* spark.read.format("ogr") for reading 1 file per spark task +* mos.read().format("multi_read_ogr") for reading file in parallel with multiple spark tasks spark.read.format("ogr") @@ -46,17 +35,17 @@ A base Spark SQL data source for reading GDAL vector data sources. The output of the reader is a DataFrame with inferred schema. The schema is inferred from both features and fields in the vector file. 
Each feature will be provided as 2 columns: - * geometry - geometry of the feature (GeometryType) - * srid - spatial reference system identifier of the feature (StringType) +* geometry - geometry of the feature (GeometryType) +* srid - spatial reference system identifier of the feature (StringType) The fields of the feature will be provided as columns in the DataFrame. The types of the fields are coerced to most concrete type that can hold all the values. The reader supports the following options: - * driverName - GDAL driver name (StringType) - * vsizip - if the vector files are zipped files, set this to true (BooleanType) - * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false - * layerName - name of the layer to read (StringType) - * layerNumber - number of the layer to read (IntegerType) +* driverName - GDAL driver name (StringType) +* vsizip - if the vector files are zipped files, set this to true (BooleanType) +* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false +* layerName - name of the layer to read (StringType) +* layerNumber - number of the layer to read (IntegerType) .. function:: read.format("ogr").load(path) @@ -109,18 +98,18 @@ Chunk size is the number of file rows that will be read per single task. The output of the reader is a DataFrame with inferred schema. The schema is inferred from both features and fields in the vector file. Each feature will be provided as 2 columns: - * geometry - geometry of the feature (GeometryType) - * srid - spatial reference system identifier of the feature (StringType) +* geometry - geometry of the feature (GeometryType) +* srid - spatial reference system identifier of the feature (StringType) The fields of the feature will be provided as columns in the DataFrame. The types of the fields are coerced to most concrete type that can hold all the values. The reader supports the following options: - * driverName - GDAL driver name (StringType) - * vsizip - if the vector files are zipped files, set this to true (BooleanType) - * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false - * chunkSize - size of the chunk to read from the file per single task (IntegerType) - default is 5000 - * layerName - name of the layer to read (StringType) - * layerNumber - number of the layer to read (IntegerType) +* driverName - GDAL driver name (StringType) +* vsizip - if the vector files are zipped files, set this to true (BooleanType) +* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false +* chunkSize - size of the chunk to read from the file per single task (IntegerType) - default is 5000 +* layerName - name of the layer to read (StringType) +* layerNumber - number of the layer to read (IntegerType) .. function:: read.format("multi_read_ogr").load(path) @@ -171,10 +160,10 @@ Mosaic provides a reader for GeoDB files natively in Spark. The output of the reader is a DataFrame with inferred schema. Only 1 file per task is read. For parallel reading of large files use the multi_read_ogr reader. 
The reader supports the following options: - * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false - * layerName - name of the layer to read (StringType) - * layerNumber - number of the layer to read (IntegerType) - * vsizip - if the vector files are zipped files, set this to true (BooleanType) +* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false +* layerName - name of the layer to read (StringType) +* layerNumber - number of the layer to read (IntegerType) +* vsizip - if the vector files are zipped files, set this to true (BooleanType) .. function:: read.format("geo_db").load(path) @@ -223,10 +212,10 @@ Mosaic provides a reader for Shapefiles natively in Spark. The output of the reader is a DataFrame with inferred schema. Only 1 file per task is read. For parallel reading of large files use the multi_read_ogr reader. The reader supports the following options: - * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false - * layerName - name of the layer to read (StringType) - * layerNumber - number of the layer to read (IntegerType) - * vsizip - if the vector files are zipped files, set this to true (BooleanType) +* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false +* layerName - name of the layer to read (StringType) +* layerNumber - number of the layer to read (IntegerType) +* vsizip - if the vector files are zipped files, set this to true (BooleanType) .. function:: read.format("shapefile").load(path) From 540bca928bf5827f989926f26ed949f1fde9e496 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 14:59:46 +0000 Subject: [PATCH 19/44] Update sphinx --- docs/source/api/raster-format-readers.rst | 67 ++++++++++++----------- docs/source/api/vector-format-readers.rst | 65 ++++++++++++---------- 2 files changed, 70 insertions(+), 62 deletions(-) diff --git a/docs/source/api/raster-format-readers.rst b/docs/source/api/raster-format-readers.rst index d41277fd7..3e0c6443e 100644 --- a/docs/source/api/raster-format-readers.rst +++ b/docs/source/api/raster-format-readers.rst @@ -6,20 +6,21 @@ Raster Format Readers Intro ##### Mosaic provides spark readers for the following raster formats: -* GTiff (GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/gtiff.html -* COG (Cloud Optimized GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/cog.html -* HDF4 using .hdf file extension - https://gdal.org/drivers/raster/hdf4.html -* HDF5 using .h5 file extension - https://gdal.org/drivers/raster/hdf5.html -* NetCDF using .nc file extension - https://gdal.org/drivers/raster/netcdf.html -* JP2ECW using .jp2 file extension - https://gdal.org/drivers/raster/jp2ecw.html -* JP2KAK using .jp2 file extension - https://gdal.org/drivers/raster/jp2kak.html -* JP2OpenJPEG using .jp2 file extension - https://gdal.org/drivers/raster/jp2openjpeg.html -* PDF using .pdf file extension - https://gdal.org/drivers/raster/pdf.html -* PNG using .png file extension - https://gdal.org/drivers/raster/png.html -* VRT using .vrt file extension - https://gdal.org/drivers/raster/vrt.html -* XPM using .xpm file extension - https://gdal.org/drivers/raster/xpm.html -* GRIB using .grb file extension - https://gdal.org/drivers/raster/grib.html -* Zarr using .zarr file extension - https://gdal.org/drivers/raster/zarr.html + + * GTiff (GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/gtiff.html + * COG (Cloud Optimized GeoTiff) using .tif file 
extension - https://gdal.org/drivers/raster/cog.html + * HDF4 using .hdf file extension - https://gdal.org/drivers/raster/hdf4.html + * HDF5 using .h5 file extension - https://gdal.org/drivers/raster/hdf5.html + * NetCDF using .nc file extension - https://gdal.org/drivers/raster/netcdf.html + * JP2ECW using .jp2 file extension - https://gdal.org/drivers/raster/jp2ecw.html + * JP2KAK using .jp2 file extension - https://gdal.org/drivers/raster/jp2kak.html + * JP2OpenJPEG using .jp2 file extension - https://gdal.org/drivers/raster/jp2openjpeg.html + * PDF using .pdf file extension - https://gdal.org/drivers/raster/pdf.html + * PNG using .png file extension - https://gdal.org/drivers/raster/png.html + * VRT using .vrt file extension - https://gdal.org/drivers/raster/vrt.html + * XPM using .xpm file extension - https://gdal.org/drivers/raster/xpm.html + * GRIB using .grb file extension - https://gdal.org/drivers/raster/grib.html + * Zarr using .zarr file extension - https://gdal.org/drivers/raster/zarr.html Other formats are supported if supported by GDAL available drivers. @@ -33,14 +34,15 @@ spark.read.format("gdal") A base Spark SQL data source for reading GDAL raster data sources. It reads metadata of the raster and exposes the direct paths for the raster files. The output of the reader is a DataFrame with the following columns: -* tile - loaded raster tile (RasterTileType) -* ySize - height of the raster in pixels (IntegerType) -* xSize - width of the raster in pixels (IntegerType) -* bandCount - number of bands in the raster (IntegerType) -* metadata - raster metadata (MapType(StringType, StringType)) -* subdatasets - raster subdatasets (MapType(StringType, StringType)) -* srid - raster spatial reference system identifier (IntegerType) -* proj4Str - raster spatial reference system proj4 string (StringType) + + * tile - loaded raster tile (RasterTileType) + * ySize - height of the raster in pixels (IntegerType) + * xSize - width of the raster in pixels (IntegerType) + * bandCount - number of bands in the raster (IntegerType) + * metadata - raster metadata (MapType(StringType, StringType)) + * subdatasets - raster subdatasets (MapType(StringType, StringType)) + * srid - raster spatial reference system identifier (IntegerType) + * proj4Str - raster spatial reference system proj4 string (StringType) .. function:: spark.read.format("gdal").load(path) @@ -95,16 +97,17 @@ If the raster pixels are larger than the grid cells, the cell values can be calc The interpolation method used is Inverse Distance Weighting (IDW) where the distance function is a k_ring distance of the grid. 
The reader supports the following options: -* fileExtension - file extension of the raster file (StringType) - default is *.* -* vsizip - if the rasters are zipped files, set this to true (BooleanType) -* resolution - resolution of the output grid (IntegerType) -* combiner - combiner operation to use when converting raster to grid (StringType) - default is mean -* retile - if the rasters are too large they can be re-tiled to smaller tiles (BooleanType) -* tileSize - size of the re-tiled tiles, tiles are always squares of tileSize x tileSize (IntegerType) -* readSubdatasets - if the raster has subdatasets set this to true (BooleanType) -* subdatasetNumber - if the raster has subdatasets, select a specific subdataset by index (IntegerType) -* subdatasetName - if the raster has subdatasets, select a specific subdataset by name (StringType) -* kRingInterpolate - if the raster pixels are larger than the grid cells, use k_ring interpolation with n = kRingInterpolate (IntegerType) + + * fileExtension - file extension of the raster file (StringType) - default is *.* + * vsizip - if the rasters are zipped files, set this to true (BooleanType) + * resolution - resolution of the output grid (IntegerType) + * combiner - combiner operation to use when converting raster to grid (StringType) - default is mean + * retile - if the rasters are too large they can be re-tiled to smaller tiles (BooleanType) + * tileSize - size of the re-tiled tiles, tiles are always squares of tileSize x tileSize (IntegerType) + * readSubdatasets - if the raster has subdatasets set this to true (BooleanType) + * subdatasetNumber - if the raster has subdatasets, select a specific subdataset by index (IntegerType) + * subdatasetName - if the raster has subdatasets, select a specific subdataset by name (StringType) + * kRingInterpolate - if the raster pixels are larger than the grid cells, use k_ring interpolation with n = kRingInterpolate (IntegerType) .. function:: mos.read().format("raster_to_grid").load(path) diff --git a/docs/source/api/vector-format-readers.rst b/docs/source/api/vector-format-readers.rst index f47c86bb5..8d9b420e2 100644 --- a/docs/source/api/vector-format-readers.rst +++ b/docs/source/api/vector-format-readers.rst @@ -8,17 +8,18 @@ Intro Mosaic provides spark readers for vector files supported by GDAL OGR drivers. Only the drivers that are built by default are supported. Here are some common useful file formats: -* GeoJSON (also ESRIJSON, TopoJSON) https://gdal.org/drivers/vector/geojson.html -* ESRI File Geodatabase (FileGDB) and ESRI File Geodatabase vector (OpenFileGDB). Mosaic implements named reader geo_db (described in this doc). https://gdal.org/drivers/vector/filegdb.html -* ESRI Shapefile / DBF (ESRI Shapefile) - Mosaic implements named reader shapefile (described in this doc) https://gdal.org/drivers/vector/shapefile.html -* Network Common Data Form (netCDF) - Mosaic implements raster reader also https://gdal.org/drivers/raster/netcdf.html -* (Geo)Parquet (Parquet) - Mosaic will be implementing a custom reader soon https://gdal.org/drivers/vector/parquet.html -* Spreadsheets (XLSX, XLS, ODS) https://gdal.org/drivers/vector/xls.html -* U.S. 
Census TIGER/Line (TIGER) https://gdal.org/drivers/vector/tiger.html -* PostgreSQL Dump (PGDump) https://gdal.org/drivers/vector/pgdump.html -* Keyhole Markup Language (KML) https://gdal.org/drivers/vector/kml.html -* Geography Markup Language (GML) https://gdal.org/drivers/vector/gml.html -* GRASS - option for Linear Referencing Systems (LRS) https://gdal.org/drivers/vector/grass.html + + * GeoJSON (also ESRIJSON, TopoJSON) https://gdal.org/drivers/vector/geojson.html + * ESRI File Geodatabase (FileGDB) and ESRI File Geodatabase vector (OpenFileGDB). Mosaic implements named reader geo_db (described in this doc). https://gdal.org/drivers/vector/filegdb.html + * ESRI Shapefile / DBF (ESRI Shapefile) - Mosaic implements named reader shapefile (described in this doc) https://gdal.org/drivers/vector/shapefile.html + * Network Common Data Form (netCDF) - Mosaic implements raster reader also https://gdal.org/drivers/raster/netcdf.html + * (Geo)Parquet (Parquet) - Mosaic will be implementing a custom reader soon https://gdal.org/drivers/vector/parquet.html + * Spreadsheets (XLSX, XLS, ODS) https://gdal.org/drivers/vector/xls.html + * U.S. Census TIGER/Line (TIGER) https://gdal.org/drivers/vector/tiger.html + * PostgreSQL Dump (PGDump) https://gdal.org/drivers/vector/pgdump.html + * Keyhole Markup Language (KML) https://gdal.org/drivers/vector/kml.html + * Geography Markup Language (GML) https://gdal.org/drivers/vector/gml.html + * GRASS - option for Linear Referencing Systems (LRS) https://gdal.org/drivers/vector/grass.html For more information please refer to gdal documentation: https://gdal.org/drivers/vector/index.html @@ -41,11 +42,12 @@ Each feature will be provided as 2 columns: The fields of the feature will be provided as columns in the DataFrame. The types of the fields are coerced to most concrete type that can hold all the values. The reader supports the following options: -* driverName - GDAL driver name (StringType) -* vsizip - if the vector files are zipped files, set this to true (BooleanType) -* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false -* layerName - name of the layer to read (StringType) -* layerNumber - number of the layer to read (IntegerType) + + * driverName - GDAL driver name (StringType) + * vsizip - if the vector files are zipped files, set this to true (BooleanType) + * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false + * layerName - name of the layer to read (StringType) + * layerNumber - number of the layer to read (IntegerType) .. function:: read.format("ogr").load(path) @@ -104,12 +106,13 @@ Each feature will be provided as 2 columns: The fields of the feature will be provided as columns in the DataFrame. The types of the fields are coerced to most concrete type that can hold all the values. 
The reader supports the following options: -* driverName - GDAL driver name (StringType) -* vsizip - if the vector files are zipped files, set this to true (BooleanType) -* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false -* chunkSize - size of the chunk to read from the file per single task (IntegerType) - default is 5000 -* layerName - name of the layer to read (StringType) -* layerNumber - number of the layer to read (IntegerType) + + * driverName - GDAL driver name (StringType) + * vsizip - if the vector files are zipped files, set this to true (BooleanType) + * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false + * chunkSize - size of the chunk to read from the file per single task (IntegerType) - default is 5000 + * layerName - name of the layer to read (StringType) + * layerNumber - number of the layer to read (IntegerType) .. function:: read.format("multi_read_ogr").load(path) @@ -160,10 +163,11 @@ Mosaic provides a reader for GeoDB files natively in Spark. The output of the reader is a DataFrame with inferred schema. Only 1 file per task is read. For parallel reading of large files use the multi_read_ogr reader. The reader supports the following options: -* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false -* layerName - name of the layer to read (StringType) -* layerNumber - number of the layer to read (IntegerType) -* vsizip - if the vector files are zipped files, set this to true (BooleanType) + + * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false + * layerName - name of the layer to read (StringType) + * layerNumber - number of the layer to read (IntegerType) + * vsizip - if the vector files are zipped files, set this to true (BooleanType) .. function:: read.format("geo_db").load(path) @@ -212,10 +216,11 @@ Mosaic provides a reader for Shapefiles natively in Spark. The output of the reader is a DataFrame with inferred schema. Only 1 file per task is read. For parallel reading of large files use the multi_read_ogr reader. The reader supports the following options: -* asWKB - if the geometry should be returned as WKB (BooleanType) - default is false -* layerName - name of the layer to read (StringType) -* layerNumber - number of the layer to read (IntegerType) -* vsizip - if the vector files are zipped files, set this to true (BooleanType) + + * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false + * layerName - name of the layer to read (StringType) + * layerNumber - number of the layer to read (IntegerType) + * vsizip - if the vector files are zipped files, set this to true (BooleanType) .. function:: read.format("shapefile").load(path) From 07b07722f5094b23d4018e5e27cfd66a5a744339 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:11:11 +0000 Subject: [PATCH 20/44] Add docs archive checkout. 
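To complement the vector reader documentation finalized in PATCHes 18 and 19 above, a hypothetical PySpark sketch of the two OGR-based flavors follows; the paths, driver, and option values are illustrative:

```python
import mosaic as mos

mos.enable_mosaic(spark, dbutils)
mos.enable_gdal(spark)  # assumed prerequisite for OGR-backed readers

# Base reader: one vector file per task.
df = (
    spark.read.format("ogr")
    .option("driverName", "GeoJSON")
    .option("asWKB", "false")
    .load("dbfs:/path/to/features.geojson")
)

# Parallel reader: reads chunks of rows per task; the documented default
# chunkSize is 5000.
big_df = (
    mos.read()
    .format("multi_read_ogr")
    .option("chunkSize", "5000")
    .load("dbfs:/path/to/large_file.geojson")
)
```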
--- .github/workflows/docs.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a7ca1a226..6245dbf42 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -31,3 +31,8 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages + - name: checkout v0.3.x archive + run: | + mkdir ./v0.3.x + cd ./v0.3.x + git clone -b gh-pages-v0.3.x --single-branch git@github.com:databrickslabs/mosaic.git From 7bcadafa849f1b3bcc9588f44643e430d371f28d Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:14:43 +0000 Subject: [PATCH 21/44] Add docs archive checkout. --- .github/workflows/docs.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a7ca1a226..a073cd8ba 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -31,3 +31,9 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages + - name: checkout v0.3.x archive + run: | + mkdir ./v0.3.x + cd ./v0.3.x + git clone -b gh-pages-v0.3.x --single-branch git@github.com:databrickslabs/mosaic.git + From 0fe63deb21645946ca4fc3dbf97b4850cf5a7806 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:20:23 +0000 Subject: [PATCH 22/44] Add docs archive checkout. --- .github/workflows/docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a073cd8ba..ffb786b84 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -35,5 +35,5 @@ jobs: run: | mkdir ./v0.3.x cd ./v0.3.x - git clone -b gh-pages-v0.3.x --single-branch git@github.com:databrickslabs/mosaic.git + git clone -b gh-pages-v0.3.x --single-branch https://github.com/databrickslabs/mosaic.git From 7baf6299c9f75493f786e6faea7d24ec106e98f0 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:30:07 +0000 Subject: [PATCH 23/44] Add docs archive checkout. --- .github/workflows/docs.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index ffb786b84..ef92222e4 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,14 +26,15 @@ jobs: with: documentation_path: docs/source requirements_path: docs/docs-requirements.txt - - name: Push changes - uses: ad-m/github-push-action@master - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - branch: gh-pages - name: checkout v0.3.x archive run: | + PWD mkdir ./v0.3.x cd ./v0.3.x git clone -b gh-pages-v0.3.x --single-branch https://github.com/databrickslabs/mosaic.git + - name: Push changes + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: gh-pages From e6448a290d6e9f82db640cb9f422794e0286ecd3 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:34:48 +0000 Subject: [PATCH 24/44] Add docs archive checkout. 
--- .github/workflows/docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index ef92222e4..129bca733 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -28,10 +28,10 @@ jobs: requirements_path: docs/docs-requirements.txt - name: checkout v0.3.x archive run: | - PWD mkdir ./v0.3.x cd ./v0.3.x git clone -b gh-pages-v0.3.x --single-branch https://github.com/databrickslabs/mosaic.git + ls -lah - name: Push changes uses: ad-m/github-push-action@master with: From a892a377e68ff5dfa202b3c6131a6d43d4fd502d Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:44:22 +0000 Subject: [PATCH 25/44] Add docs archive checkout. --- .github/workflows/docs.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 129bca733..e17d8f489 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -31,7 +31,10 @@ jobs: mkdir ./v0.3.x cd ./v0.3.x git clone -b gh-pages-v0.3.x --single-branch https://github.com/databrickslabs/mosaic.git - ls -lah + git add --all + git commit -am "Adding v0.3.x docs" + cd ../ + ls -la - name: Push changes uses: ad-m/github-push-action@master with: From 91bf62feed0d4f120a6626af33d6f109017860a2 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:50:05 +0000 Subject: [PATCH 26/44] Add docs archive checkout. --- .github/workflows/docs.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index e17d8f489..5eba6cc62 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -31,9 +31,12 @@ jobs: mkdir ./v0.3.x cd ./v0.3.x git clone -b gh-pages-v0.3.x --single-branch https://github.com/databrickslabs/mosaic.git + git checkout gh-pages-v0.3.x + git pull + git add --all + cd ./mosaic git add --all git commit -am "Adding v0.3.x docs" - cd ../ ls -la - name: Push changes uses: ad-m/github-push-action@master From bee2aa9adcc469c74a2451af27c491e8443da47d Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:55:21 +0000 Subject: [PATCH 27/44] Add docs archive checkout. --- .github/workflows/docs.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5eba6cc62..67f7264d6 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -30,9 +30,8 @@ jobs: run: | mkdir ./v0.3.x cd ./v0.3.x - git clone -b gh-pages-v0.3.x --single-branch https://github.com/databrickslabs/mosaic.git + git submodule add https://github.com/databrickslabs/mosaic.git v0.3.x/mosaic git checkout gh-pages-v0.3.x - git pull git add --all cd ./mosaic git add --all From 13f99e9da14a68969bea13c9ad818c65b833b553 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 15:58:56 +0000 Subject: [PATCH 28/44] Add docs archive checkout. 
--- .github/workflows/docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 67f7264d6..15d193a29 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -33,7 +33,7 @@ jobs: git submodule add https://github.com/databrickslabs/mosaic.git v0.3.x/mosaic git checkout gh-pages-v0.3.x git add --all - cd ./mosaic + ls -la git add --all git commit -am "Adding v0.3.x docs" ls -la From 72a506aa7eb1518819356bbdede57140f0587d77 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 16:00:09 +0000 Subject: [PATCH 29/44] Add docs archive checkout. --- .github/workflows/docs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 15d193a29..b03bb615e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -28,8 +28,6 @@ jobs: requirements_path: docs/docs-requirements.txt - name: checkout v0.3.x archive run: | - mkdir ./v0.3.x - cd ./v0.3.x git submodule add https://github.com/databrickslabs/mosaic.git v0.3.x/mosaic git checkout gh-pages-v0.3.x git add --all From 95eedc03b712e109c8ef01bff779504a2caa8063 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 16:04:55 +0000 Subject: [PATCH 30/44] Add docs archive checkout. --- .github/workflows/docs.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index b03bb615e..d65022232 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -30,8 +30,7 @@ jobs: run: | git submodule add https://github.com/databrickslabs/mosaic.git v0.3.x/mosaic git checkout gh-pages-v0.3.x - git add --all - ls -la + rm -f .gitmodules git add --all git commit -am "Adding v0.3.x docs" ls -la From 0dfcd958454df1d4fbfa28aa85fdac03e11c04fe Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 16:22:43 +0000 Subject: [PATCH 31/44] Add docs archive checkout. --- .github/workflows/docs.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d65022232..af1f5452c 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -28,11 +28,10 @@ jobs: requirements_path: docs/docs-requirements.txt - name: checkout v0.3.x archive run: | - git submodule add https://github.com/databrickslabs/mosaic.git v0.3.x/mosaic - git checkout gh-pages-v0.3.x - rm -f .gitmodules + git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x git add --all git commit -am "Adding v0.3.x docs" + cd v0.3.x ls -la - name: Push changes uses: ad-m/github-push-action@master From 84ba805157caa216245faef83c8cbbba2029974a Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 16:33:21 +0000 Subject: [PATCH 32/44] Add docs archive checkout. 
--- .github/workflows/docs.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index af1f5452c..d1c4e9a8f 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -28,11 +28,14 @@ jobs: requirements_path: docs/docs-requirements.txt - name: checkout v0.3.x archive run: | + git checkout gh-pages-v0.3.x + git pull git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x git add --all git commit -am "Adding v0.3.x docs" cd v0.3.x ls -la + git checkout gh-pages - name: Push changes uses: ad-m/github-push-action@master with: From 778ff5c652753510f9e4e62cd5c6b38858311343 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 16:36:44 +0000 Subject: [PATCH 33/44] Add docs archive checkout. --- .github/workflows/docs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d1c4e9a8f..4aa493dba 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -30,6 +30,7 @@ jobs: run: | git checkout gh-pages-v0.3.x git pull + mkdir ./v0.3.x git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x git add --all git commit -am "Adding v0.3.x docs" From 57c1c982e3ffc8993bad886cd37440c640763592 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 16:41:34 +0000 Subject: [PATCH 34/44] Add docs archive checkout. --- .github/workflows/docs.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 4aa493dba..05e617ccd 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -30,13 +30,12 @@ jobs: run: | git checkout gh-pages-v0.3.x git pull + git checkout gh-pages mkdir ./v0.3.x git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x git add --all git commit -am "Adding v0.3.x docs" - cd v0.3.x ls -la - git checkout gh-pages - name: Push changes uses: ad-m/github-push-action@master with: From fa22eea6d6e515a46ca5a295d83940da744a8d20 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 16:56:19 +0000 Subject: [PATCH 35/44] Add docs archive checkout. 
--- .github/workflows/docs.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 05e617ccd..5519ecec0 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -27,6 +27,16 @@ jobs: documentation_path: docs/source requirements_path: docs/docs-requirements.txt - name: checkout v0.3.x archive + # Please do not change any step in here, even though it may look hacky + # This is the only way to emulate git archive --remote with actions/checkout + # git checkout gh-pages-v0.3.x is required to have a local branch for archiving + # git pull is optional, but it's a good practice to have the latest version + # git checkout gh-pages right after is required to go back to the working branch + # mkdir ./v0.3.x is required to create a directory for the archive + # git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x is required to extract the archive + # in the right place + # git add --all is required to add the new files to the working branch + # git commit -am "Adding v0.3.x docs" is required to commit the changes run: | git checkout gh-pages-v0.3.x git pull @@ -35,7 +45,6 @@ jobs: git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x git add --all git commit -am "Adding v0.3.x docs" - ls -la - name: Push changes uses: ad-m/github-push-action@master with: From 2f14acde9ce8233795cd8c2caf25f4ad8113a838 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 17:00:20 +0000 Subject: [PATCH 36/44] Add docs archive checkout. --- .github/workflows/docs.yml | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 6245dbf42..a42524880 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,13 +26,27 @@ jobs: with: documentation_path: docs/source requirements_path: docs/docs-requirements.txt + - name: checkout v0.3.x archive + # Please do not change any step in here, even though it may look hacky + # This is the only way to emulate git archive --remote with actions/checkout + # git checkout gh-pages-v0.3.x is required to have a local branch for archiving + # git pull is optional, but it's a good practice to have the latest version + # git checkout gh-pages right after is required to go back to the working branch + # mkdir ./v0.3.x is required to create a directory for the archive + # git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x is required to extract the archive + # in the right place + # git add --all is required to add the new files to the working branch + # git commit -am "Adding v0.3.x docs" is required to commit the changes + run: | + git checkout gh-pages-v0.3.x + git pull + git checkout gh-pages + mkdir ./v0.3.x + git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x + git add --all + git commit -am "Adding v0.3.x docs" - name: Push changes uses: ad-m/github-push-action@master with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages - - name: checkout v0.3.x archive - run: | - mkdir ./v0.3.x - cd ./v0.3.x - git clone -b gh-pages-v0.3.x --single-branch git@github.com:databrickslabs/mosaic.git From 7bc770568349d6f0eec4096997b8d07ba3ffce04 Mon Sep 17 00:00:00 2001 From: "milos.colic" Date: Mon, 15 Jan 2024 17:04:47 +0000 Subject: [PATCH 37/44] Link archived docs. 
--- docs/source/index.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index ee499822e..f0896cfd3 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -75,6 +75,7 @@ Documentation
 usage/usage
 models/models
 literature/videos
+ v0.3.x/index

 Indices and tables

From 0167e51b83828c4c93a55ceff857e8a6554806dc Mon Sep 17 00:00:00 2001
From: "milos.colic"
Date: Mon, 15 Jan 2024 17:06:22 +0000
Subject: [PATCH 38/44] Link archived docs.

---
 docs/source/index.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index ee499822e..f0896cfd3 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -75,6 +75,7 @@ Documentation
 usage/usage
 models/models
 literature/videos
+ v0.3.x/index

 Indices and tables

From dbdb24d37557bd6755f71bafd5d270ac686d72d4 Mon Sep 17 00:00:00 2001
From: "milos.colic"
Date: Mon, 15 Jan 2024 17:13:18 +0000
Subject: [PATCH 39/44] Link archived docs.

---
 docs/source/index.rst | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index f0896cfd3..7ff8cc7ab 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -60,7 +60,7 @@ Mosaic provides:
 * optimisations for performing point-in-polygon joins using an approach we co-developed with Ordnance Survey (`blog post `_); and
 * the choice of a Scala, SQL and Python API.

-
+For Mosaic versions < 0.4.0 please use the `0.3.x docs `_.

 Documentation
 =============
@@ -75,7 +75,6 @@ Documentation
 usage/usage
 models/models
 literature/videos
- v0.3.x/index

 Indices and tables

From b843d404dd2a0b466cb96c54110c2e7efda8d9ae Mon Sep 17 00:00:00 2001
From: "milos.colic"
Date: Mon, 15 Jan 2024 17:37:38 +0000
Subject: [PATCH 40/44] Docs updates for new versions.

---
 docs/source/index.rst | 47 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index f0896cfd3..0211a6aac 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -60,6 +60,53 @@ Mosaic provides:
 * optimisations for performing point-in-polygon joins using an approach we co-developed with Ordnance Survey (`blog post `_); and
 * the choice of a Scala, SQL and Python API.

+.. note::
+   For Mosaic versions < 0.4.0 please use the `0.3.x docs `_.
+
+
+Version 0.4.0
+=============
+
+We recommend using Databricks Runtime version 13.3 LTS with Photon enabled.
+
+.. warning::
+   Mosaic 0.4.x series only supports DBR 13.x.
+   If running on a different DBR it will throw an exception:
+
+   **DEPRECATION ERROR: Mosaic v0.4.x series only supports Databricks Runtime 13. You can specify `%pip install 'databricks-mosaic<0.4,>=0.3'` for DBR < 13.**
+
+As of the 0.4.0 release, Mosaic issues the following ERROR when initialized on a cluster that is neither Photon Runtime nor Databricks Runtime ML `ADB `_ | `AWS `_ | `GCP `_ :
+
+**DEPRECATION ERROR: Please use a Databricks Photon-enabled Runtime for performance benefits or Runtime ML for spatial AI benefits; Mosaic 0.4.x series restricts executing this cluster.**
+
+As of Mosaic 0.4.0 (subject to change in follow-on releases)
+ * Mosaic SQL expressions cannot yet be registered with `Unity Catalog `_ due to API changes affecting DBRs >= 13.
+ * `Assigned Clusters `_ : Mosaic Python, R, and Scala APIs.
+ * `Shared Access Clusters `_ : Mosaic Scala API (JVM) with Admin `allowlisting `_ ; Python bindings to Mosaic Scala APIs are blocked by Py4J Security on Shared Access Clusters.

+..
note::
+   As of Mosaic 0.4.0 (subject to change in follow-on releases)
+
+   * `Unity Catalog `_ : Enforces process isolation which is difficult to accomplish with custom JVM libraries; as such only built-in (aka platform provided) JVM APIs can be invoked from other supported languages in Shared Access Clusters.
+   * `Volumes `_ : Along the same principle of isolation, clusters (both assigned and shared access) can read Volumes via relevant built-in readers and writers or via custom Python calls which do not involve any custom JVM code.
+
+
+
+Version 0.3.x Series
+====================
+
+We recommend using Databricks Runtime version 12.2 LTS with Photon enabled.
+
+.. warning::
+   Mosaic 0.3.x series does not support DBR 13.x.
+
+As of the 0.3.11 release, Mosaic issues the following WARNING when initialized on a cluster that is neither Photon Runtime nor Databricks Runtime ML `ADB `_ | `AWS `_ | `GCP `_ :
+
+**DEPRECATION WARNING: Please use a Databricks Photon-enabled Runtime for performance benefits or Runtime ML for spatial AI benefits; Mosaic will stop working on this cluster after v0.3.x.**
+If you are receiving this warning in v0.3.11+, you will want to begin to plan for a supported runtime. The reason we are making this change is that we are streamlining Mosaic internals to be more aligned with future product APIs which are powered by Photon. Along this direction of change, Mosaic has standardized to JTS as its default and supported Vector Geometry Provider.
+
+
+
 Documentation

From f828605162ca50b898a0a034f798b10a1619d746 Mon Sep 17 00:00:00 2001
From: "milos.colic"
Date: Mon, 15 Jan 2024 17:38:38 +0000
Subject: [PATCH 41/44] Docs updates for new versions.

---
 docs/source/index.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index 0211a6aac..46398ce4d 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -96,6 +96,7 @@ Version 0.3.x Series
 ====================

 We recommend using Databricks Runtime version 12.2 LTS with Photon enabled.
+For Mosaic versions < 0.4.0 please use the `0.3.x docs `_.

 .. warning::
    Mosaic 0.3.x series does not support DBR 13.x.

 As of the 0.3.11 release, Mosaic issues the following WARNING when initialized on a cluster that is neither Photon Runtime nor Databricks Runtime ML `ADB `_ | `AWS `_ | `GCP `_ :

 **DEPRECATION WARNING: Please use a Databricks Photon-enabled Runtime for performance benefits or Runtime ML for spatial AI benefits; Mosaic will stop working on this cluster after v0.3.x.**
 If you are receiving this warning in v0.3.11+, you will want to begin to plan for a supported runtime. The reason we are making this change is that we are streamlining Mosaic internals to be more aligned with future product APIs which are powered by Photon.

From 8791ef73ad993974154841aabf1d4b8e9c26fbc8 Mon Sep 17 00:00:00 2001
From: "milos.colic"
Date: Mon, 15 Jan 2024 19:00:06 +0000
Subject: [PATCH 42/44] Fix formatting issues.
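For context on the raster ingestion helpers reformatted below, here is a minimal, hypothetical sketch of `rst_fromcontent`; the `binaryFile` source, path, and driver are illustrative, and `sizeInMB = -1` is assumed to keep each raster as a single tile, mirroring the default shown in MosaicContext:

```python
from pyspark.sql.functions import lit

import mosaic as mos

# binaryFile yields a `content` column holding the raw raster bytes.
binary_df = spark.read.format("binaryFile").load("dbfs:/path/to/raster.tif")

tiles = binary_df.select(
    mos.rst_fromcontent("content", lit("GTiff"), lit(-1)).alias("tile")
)
```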
--- python/mosaic/api/raster.py | 4 +++- .../labs/mosaic/functions/MosaicContext.scala | 24 +++++++++---------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/python/mosaic/api/raster.py b/python/mosaic/api/raster.py index d27f669bc..b191ba8d5 100644 --- a/python/mosaic/api/raster.py +++ b/python/mosaic/api/raster.py @@ -927,7 +927,9 @@ def rst_fromcontent(raster: ColumnOrName, driver: ColumnOrName, sizeInMB: Column """ return config.mosaic_context.invoke_function( - "rst_fromcontent", pyspark_to_java_column(raster), pyspark_to_java_column(driver), + "rst_fromcontent", + pyspark_to_java_column(raster), + pyspark_to_java_column(driver), pyspark_to_java_column(sizeInMB) ) diff --git a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala index fd1dd176b..cd516ecc5 100644 --- a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala +++ b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala @@ -711,16 +711,15 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends ColumnAdapter(RST_Tessellate(raster.expr, resolution.expr, expressionConfig)) def rst_tessellate(raster: Column, resolution: Int): Column = ColumnAdapter(RST_Tessellate(raster.expr, lit(resolution).expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:Column): Column = + def rst_fromcontent(raster: Column, driver: Column): Column = ColumnAdapter(RST_FromContent(raster.expr, driver.expr, lit(-1).expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:Column, sizeInMB:Column): Column = + def rst_fromcontent(raster: Column, driver: Column, sizeInMB: Column): Column = ColumnAdapter(RST_FromContent(raster.expr, driver.expr, sizeInMB.expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:String): Column = + def rst_fromcontent(raster: Column, driver: String): Column = ColumnAdapter(RST_FromContent(raster.expr, lit(driver).expr, lit(-1).expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:String, sizeInMB:Int): Column = + def rst_fromcontent(raster: Column, driver: String, sizeInMB: Int): Column = ColumnAdapter(RST_FromContent(raster.expr, lit(driver).expr, lit(sizeInMB).expr, expressionConfig)) - def rst_fromfile(raster: Column): Column = - ColumnAdapter(RST_FromFile(raster.expr, lit(-1).expr, expressionConfig)) + def rst_fromfile(raster: Column): Column = ColumnAdapter(RST_FromFile(raster.expr, lit(-1).expr, expressionConfig)) def rst_fromfile(raster: Column, sizeInMB: Column): Column = ColumnAdapter(RST_FromFile(raster.expr, sizeInMB.expr, expressionConfig)) def rst_fromfile(raster: Column, sizeInMB: Int): Column = @@ -1015,11 +1014,12 @@ object MosaicContext extends Logging { val isML = sparkVersion.contains("-ml-") val isPhoton = sparkVersion.contains("-photon-") - val isTest = ( - dbrMajor == 0 - && !spark.conf.getAll.exists(_._1.startsWith("spark.databricks.clusterUsageTags.")) - ) - + val isTest = + ( + dbrMajor == 0 + && !spark.conf.getAll.exists(_._1.startsWith("spark.databricks.clusterUsageTags.")) + ) + if (dbrMajor != 13 && !isTest) { val msg = """|DEPRECATION ERROR: | Mosaic v0.4.x series only supports Databricks Runtime 13. 
@@ -1039,7 +1039,7 @@ object MosaicContext extends Logging {
             logError(msg)
             println(msg)
             throw new Exception(msg)
-         }
+        }
         true
     }

From 5a4b7e415b1e9ed50266474791ba3e671de34833 Mon Sep 17 00:00:00 2001
From: "milos.colic"
Date: Mon, 15 Jan 2024 22:04:35 +0000
Subject: [PATCH 43/44] Update docs index page.

---
 docs/source/index.rst | 50 ++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 49 insertions(+), 1 deletion(-)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index 7ff8cc7ab..172396809 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -60,7 +60,55 @@ Mosaic provides:
 * optimisations for performing point-in-polygon joins using an approach we co-developed with Ordnance Survey (`blog post `_); and
 * the choice of a Scala, SQL and Python API.

-For Mosaic versions < 0.4.0 please use the `0.3.x docs `_.
+.. note::
+   For Mosaic versions < 0.4.0 please use the `0.3.x docs `_.
+
+
+Version 0.4.0
+=============
+
+We recommend using Databricks Runtime version 13.3 LTS with Photon enabled.
+
+.. warning::
+   Mosaic 0.4.x series only supports DBR 13.x.
+   If running on a different DBR it will throw an exception:
+
+   **DEPRECATION ERROR: Mosaic v0.4.x series only supports Databricks Runtime 13. You can specify `%pip install 'databricks-mosaic<0.4,>=0.3'` for DBR < 13.**
+
+As of the 0.4.0 release, Mosaic issues the following ERROR when initialized on a cluster that is neither Photon Runtime nor Databricks Runtime ML `ADB `_ | `AWS `_ | `GCP `_ :
+
+**DEPRECATION ERROR: Please use a Databricks Photon-enabled Runtime for performance benefits or Runtime ML for spatial AI benefits; Mosaic 0.4.x series restricts executing this cluster.**
+
+As of Mosaic 0.4.0 (subject to change in follow-on releases)
+ * Mosaic SQL expressions cannot yet be registered with `Unity Catalog `_ due to API changes affecting DBRs >= 13.
+ * `Assigned Clusters `_ : Mosaic Python, R, and Scala APIs.
+ * `Shared Access Clusters `_ : Mosaic Scala API (JVM) with Admin `allowlisting `_ ; Python bindings to Mosaic Scala APIs are blocked by Py4J Security on Shared Access Clusters.
+
+.. note::
+   As of Mosaic 0.4.0 (subject to change in follow-on releases)
+
+   * `Unity Catalog `_ : Enforces process isolation which is difficult to accomplish with custom JVM libraries; as such only built-in (aka platform provided) JVM APIs can be invoked from other supported languages in Shared Access Clusters.
+   * `Volumes `_ : Along the same principle of isolation, clusters (both assigned and shared access) can read Volumes via relevant built-in readers and writers or via custom Python calls which do not involve any custom JVM code.
+
+
+
+Version 0.3.x Series
+====================
+
+We recommend using Databricks Runtime version 12.2 LTS with Photon enabled.
+For Mosaic versions < 0.4.0 please use the `0.3.x docs `_.
+
+.. warning::
+   Mosaic 0.3.x series does not support DBR 13.x.
+
+As of the 0.3.11 release, Mosaic issues the following WARNING when initialized on a cluster that is neither Photon Runtime nor Databricks Runtime ML `ADB `_ | `AWS `_ | `GCP `_ :
+
+**DEPRECATION WARNING: Please use a Databricks Photon-enabled Runtime for performance benefits or Runtime ML for spatial AI benefits; Mosaic will stop working on this cluster after v0.3.x.**
+If you are receiving this warning in v0.3.11+, you will want to begin to plan for a supported runtime. The reason we are making this change is that we are streamlining Mosaic internals to be more aligned with future product APIs which are powered by Photon.
Along this direction of change, Mosaic has standardized to JTS as its default and supported Vector Geometry Provider. + + + + Documentation ============= From 81b9352da5c4b7400fe1b4b3e4443f1eab76d42f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 23:31:10 +0000 Subject: [PATCH 44/44] Bump org.apache.maven.plugins:maven-surefire-plugin from 3.2.3 to 3.2.5 Bumps [org.apache.maven.plugins:maven-surefire-plugin](https://github.com/apache/maven-surefire) from 3.2.3 to 3.2.5. - [Release notes](https://github.com/apache/maven-surefire/releases) - [Commits](https://github.com/apache/maven-surefire/compare/surefire-3.2.3...surefire-3.2.5) --- updated-dependencies: - dependency-name: org.apache.maven.plugins:maven-surefire-plugin dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 1283263c9..cdb4c8d2c 100644 --- a/pom.xml +++ b/pom.xml @@ -191,7 +191,7 @@ org.apache.maven.plugins maven-surefire-plugin - 3.2.3 + 3.2.5 true