Test fixups.
danscales committed Jan 16, 2025
1 parent 3a500db commit 16e13ad
Showing 6 changed files with 22 additions and 21 deletions.
@@ -3,8 +3,8 @@ package org.globalforestwatch.layers
import org.globalforestwatch.grids.GridTile

/** Parameterized layer for all the various Mapspam commodity yield datasets.
* 'commodity' should be one of the four-letter uppercase Mapspam commodity codes,
* such as 'COCO' or 'OILP'.
* 'commodity' should be one of the four-letter uppercase Mapspam commodity codes,
* such as 'COCO' or 'OILP'.
*/
case class MapspamYield(commodity: String, gridTile: GridTile, kwargs: Map[String, Any])
extends FloatLayer
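
For context, a minimal usage sketch of the parameterized layer described by the doc comment above. Only the MapspamYield(commodity, gridTile, kwargs) signature is taken from the diff; the helper name and the idea of passing the tile and kwargs through from a caller are illustrative assumptions, not part of this commit.

// Illustrative sketch only: "OILP" is one of the four-letter Mapspam codes
// the doc comment mentions; gridTile and kwargs are assumed to be supplied
// by the caller.
def oilPalmYieldLayer(gridTile: GridTile, kwargs: Map[String, Any]): MapspamYield =
  MapspamYield("OILP", gridTile, kwargs)
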
@@ -11,9 +11,16 @@ trait JobError

case class RasterReadError(msg: String) extends JobError
case class GeometryError(msg: String) extends JobError

/* Error indicating that the location did not intersect the centroid of any
* raster pixels, hence there are no results. */
case object NoIntersectionError extends JobError
case class NoYieldException(msg: String) extends Exception(msg)

/* Error and exception indicating that no yield could be determined for the specified
* location for use in GHG analysis */
case class NoYieldError(msg: String) extends JobError
case class NoYieldException(msg: String) extends Exception(msg)

case class MultiError(errors: Set[String]) extends JobError {
def addError(err: JobError): MultiError = MultiError(errors + err.toString)
def addError(other: MultiError): MultiError = MultiError(errors ++ other.errors)
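
As a hedged sketch of how the new error type plugs into the existing aggregation, using only the constructors and the addError signature visible above (the message text is made up):

// Sketch only: fold individual JobErrors into a MultiError.
val accumulated = MultiError(Set.empty)
  .addError(NoYieldError("no Mapspam yield available for this location"))
  .addError(NoIntersectionError)
// accumulated.errors now holds the string form of each error added so far.
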
@@ -33,10 +33,11 @@ object SummaryMain {

final def main(args: Array[String]): Unit = {
// Print out environment variables (if needed for debugging)
//val environmentVars = System.getenv().forEach {
// case (key, value) => println(s"$key = $value")
//}

if (false) {
val environmentVars = System.getenv().forEach {
case (key, value) => println(s"$key = $value")
}
}
command.parse(args, sys.env) match {
case Left(help) =>
System.err.println(help)
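
The new block keeps the debug printout compiling while disabled. An alternative sketch, not part of this commit, that avoids going through java.util.Map#forEach entirely is to use sys.env, which is already a Scala Map:

// Sketch only: print all environment variables for debugging.
sys.env.foreach { case (key, value) => println(s"$key = $value") }
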
@@ -1,6 +1,7 @@
list_id location_id status_code location_error total_area emissions_factor_yearly
166 -1 2 1006.2585 {"2020":0.1011,"2021":0.1023,"2022":0.1033,"2023":0.1032}
2 1 2 19068.2043 {"2020":0.1525,"2021":0.1678,"2022":0.1756,"2023":0.184}
176 1 2 71923.9299 {"2020":0.2941,"2021":0.3003,"2022":0.3073,"2023":0.3131}
1 3 2 1.2978 {"2020":0.1177,"2021":0.1087,"2022":0.1445,"2023":0.1347}
1036 14697 2 3.826 {"2020":0.0,"2021":0.0,"2022":0.0,"2023":0.0}
166 1 2 1006.2585 {"2020":0.1011,"2021":0.1023,"2022":0.1033,"2023":0.1032}
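
The emissions_factor_yearly column in the expected output above is a JSON object mapping year to an emissions factor. A hedged sketch of unpacking it with Spark SQL (the expectedDF name is hypothetical; only the column name comes from the header row above):

import org.apache.spark.sql.functions.{col, from_json}
import org.apache.spark.sql.types.{DoubleType, MapType, StringType}

// Sketch only: parse the year -> factor JSON string into a typed map column.
val withFactors = expectedDF.withColumn(
  "emissions_factor_yearly",
  from_json(col("emissions_factor_yearly"), MapType(StringType, DoubleType))
)
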
9 changes: 1 addition & 8 deletions src/test/resources/ghg.tsv
@@ -5,11 +5,4 @@ list_id location_id geom commodity yield
166 1 01030000000100000005000000e7aed1e4156e5840d583f1ef0d6706405123d124216e5840bb5dcfdae43506403dbcd0243d70584057ee7202b43806406a50d1e404705840f75140ece2700640e7aed1e4156e5840d583f1ef0d670640 OILP 0.0
166 -1 01030000000100000005000000e7aed1e4156e5840d583f1ef0d6706405123d124216e5840bb5dcfdae43506403dbcd0243d70584057ee7202b43806406a50d1e404705840f75140ece2700640e7aed1e4156e5840d583f1ef0d670640 OILP 0.0
1036 14697 0103000000010000007B0000002387889B534914C09128B4ACFB071840CD902A8A574914C09702D2FE07081840A5828AAA5F4914C0B3D0CE69160818409A081B9E5E4914C041800C1D3B08184023A46E675F4914C09C508880430818406C3D4338664914C05DA5BBEB6C081840508C2C99634914C0524832AB770818408F8EAB915D4914C00723F609A0081840172AFF5A5E4914C05D5320B3B3081840A08B868C474914C079043752B6081840CEFFAB8E1C4914C002A08A1BB708184012BF620D174914C0F6251B0FB6081840F0164850FC4814C0BDE0D39CBC081840A0C03BF9F44814C04B395FECBD081840CE8B135FED4814C06EA7AD11C10818402905DD5ED24814C06EA7AD11C1081840BDFDB968C84814C07364E597C10818403BC8EBC1A44814C0C39D0B23BD0818409624CFF57D4814C051F69672BE0818404C6E14596B4814C018B14F00C50818407A1C06F3574814C0849B8C2AC308184096B036C64E4814C01E51A1BAB9081840D595CFF23C4814C0C9207711A608184096766A2E374814C079CA6ABA9E081840583A1F9E254814C0F111312592081840410C74ED0B4814C09067976F7D081840A2427573F14714C0C98FF8156B0818401F477364E54714C058CB9D9960081840A2258FA7E54714C09010E50B5A08184091EEE714E44714C0D5CF9B8A54081840B33F506EDB4714C096B036C64E0818405E2C0C91D34714C05D4E098849081840807D74EACA4714C07AE2395B40081840200DA7CCCD4714C06F2EFEB6270818406F46CD57C94714C02578431A15081840C53C2B69C54714C003D0285DFA0718408600E0D8B34714C03CF88903E8071840BF45274BAD4714C0F8FE06EDD50718402B306475AB4714C020D3DA34B607184009DFFB1BB44714C0BA4E232D95071840533E0455A34714C0BFD18E1B7E07184004E8F7FD9B4714C05F44DB31750718408C834BC79C4714C0D1CE691668071840D0420246974714C0322251685907184098E0D407924714C00F971C774A0718407B2FBE688F4714C0AFEC82C135071840530438BD8B4714C05F96766A2E0718408195438B6C4714C0E29178793A071840CB113290674714C0A94C3107410718403DB9A640664714C02C2B4D4A41071840C0401020434714C065AA605452071840E29178793A4714C020EBA9D557071840CB80B3942C4714C0AF264F594D0718402CF180B2294714C0107A36AB3E07184015FDA199274714C05AF624B0390718408D614ED0264714C0541C075E2D071840C0CC77F0134714C093E4B9BE0F071840AF95D05D124714C038143E5B070718408D614ED0264714C054A86E2EFE061840F96871C6304714C076F9D687F5061840D15AD1E6384714C049111956F106184076FEEDB25F4714C00BD5CDC5DF06184042B0AA5E7E4714C016325706D5061840530438BD8B4714C071E5EC9DD10618403C4A253CA14714C082FFAD64C70618405ED5592DB04714C0B587BD50C0061840809A5AB6D64714C0B0AD9FFEB3061840361E6CB1DB4714C0111E6D1CB1061840F855B950F94714C0A5164A26A70618406FD74B53044814C082A8FB00A4061840B3B3E89D0A4814C04489963C9E0618404DA3C9C5184814C08E05854199061840083BC5AA414814C0333509DE90061840C93846B2474814C077F4BF5C8B061840FDFA2136584814C0994528B682061840D5EC8156604814C08351499D8006184046EBA86A824814C07D772B4B74061840A7785C548B4814C03F58C6866E0618400249D8B7934814C02D211FF46C061840B20FB22C984814C09A0B5C1E6B061840E0F76F5E9C4814C077BAF3C4730618408A01124DA04814C011AAD4EC81061840BE892139994814C0A5DC7D8E8F061840D4601A868F4814C04A29E8F6920618409BFEEC478A4814C0A43330F2B2061840DA006C40844814C0E90FCD3CB9061840639CBF09854814C02272FA7ABE0618407F4DD6A8874814C05AF10D85CF061840249A40118B4814C0E38C614ED0061840139D6516A14814C0E8667FA0DC0618405D19541B9C4814C0C075C58CF00618400266BE839F4814C08D0A9C6C0307184079E75086AA4814C0CC29013109071840F6251B0FB64814C0AABBB20B06071840ACA92C0ABB4814C0E200FA7DFF061840F65FE7A6CD4814C0C632FD12F106184001DA56B3CE4814C0AA81E673EE0618402448A5D8D14814C0994A3FE1EC061840E57FF277EF4814C00AF2B391EB061840A0C03BF9F44814C0F4FDD478E9061840406D54A7034914C065A54929E80618406D72F8A4134914C0F9BA0CFFE906184089230F44164914C021E692AAED061840FBE769C0204914C0F41ABB44F5061840FBE769C0204914C0AF5B04C6FA06184051DEC7D11C4914C093C7D3F203071840BDC804FC1A4914C0BB0F406A1307184067D2A6EA1E4914C0384E0AF31E071840E4F38AA71E4914C070B03731240718407EC68503214914C0A912656F290718
40A034D428244914C03788D68A360718408940F50F224914C07C8159A1480718406D8FDE701F4914C026C5C7276407184001A5A146214914C0988922A46E071840ABAE4335254914C0876F61DD780718400C3CF71E2E4914C04E4700378B0718408F1A13622E4914C086A92D7590071840B1886187314914C004E8F7FD9B071840459E245D334914C075AC527AA6071840CE397826344914C0147651F4C0071840FB213658384914C097715303CD07184045BB0A293F4914C091D10149D80718402387889B534914C09128B4ACFB071840 RUBB 0.0








176 1 010300000001000000050000000ccc88643be948c0c807b0ae5b9621c00ccc88643be948c0984e5d3c8a0422c0d8bc094710c648c0984e5d3c8a0422c0d8bc094710c648c0c807b0ae5b9621c00ccc88643be948c0c807b0ae5b9621c0 NONE 1500
@@ -2,7 +2,6 @@ package org.globalforestwatch.summarystats.ghg

import cats.data.NonEmptyList
import com.github.mrpowers.spark.fast.tests.DataFrameComparer
import com.github.mrpowers.spark.daria.sql.transformations.sortColumns
import geotrellis.vector._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode,Row}
@@ -16,7 +15,7 @@ import org.apache.spark.broadcast.Broadcast

class GHGAnalysisSpec extends TestEnvironment with DataFrameComparer {
def ghgInputTsvPath = getClass.getResource("/ghg.tsv").toString()
def backupYield = getClass.getResource("/part_yield_spam_gadm2.csv").toString()
def gadm2YieldPath = getClass.getResource("/part_yield_spam_gadm2.csv").toString()
def ghgExpectedOutputPath = getClass.getResource("/ghg-output").toString()

def Ghg(features: RDD[ValidatedLocation[Geometry]], broadcastArray: Broadcast[Array[Row]]) = {
@@ -47,18 +46,18 @@ class GHGAnalysisSpec extends TestEnvironment with DataFrameComparer {
csvFile.withColumn("status_code", col("status_code").cast(IntegerType))
}

it("matches recorded output for dashboard for vector gadm", ProTag) {
val featureLoc31RDD = ValidatedFeatureRDD(
it("matches recorded output for various locations and commodities", ProTag) {
val ghgFeatures = ValidatedFeatureRDD(
NonEmptyList.one(ghgInputTsvPath),
"gfwpro_ext",
FeatureFilter.empty,
splitFeatures = true
)
val backupDF = spark.read
.options(Map("header" -> "true", "delimiter" -> ",", "escape" -> "\""))
.csv(backupYield)
.csv(gadm2YieldPath)
val broadcastArray = spark.sparkContext.broadcast(backupDF.collect())
val fcd = Ghg(featureLoc31RDD, broadcastArray)
val fcd = Ghg(ghgFeatures, broadcastArray)
val summaryDF = GHGDF.getFeatureDataFrame(fcd, spark)
summaryDF.collect().foreach(println)
//saveExpectedFcdResult(summaryDF)
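
The spec mixes in DataFrameComparer, so the elided tail of the test presumably compares summaryDF against the recorded output under ghgExpectedOutputPath. A hedged sketch of that comparison (readExpectedFcdResult is a hypothetical helper; assertSmallDataFrameEquality comes from spark-fast-tests):

// Sketch only: compare computed results against the recorded expected output.
val expectedDF = readExpectedFcdResult(ghgExpectedOutputPath) // hypothetical helper
assertSmallDataFrameEquality(summaryDF, expectedDF, ignoreNullable = true)
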