In [37]:
import sys.process._
import geotrellis.proj4.CRS
import geotrellis.raster.io.geotiff.writer.GeoTiffWriter
import geotrellis.raster.io.geotiff.{SinglebandGeoTiff, _}
import geotrellis.raster.{CellType, DoubleArrayTile}
import geotrellis.spark.io.hadoop._
import geotrellis.vector.{Extent, ProjectedExtent}
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
// Spire is a numeric library for Scala designed to be generic, fast, and precise;
// its cfor macro drives the cell-by-cell copy further below.
import spire.syntax.cfor._
In [40]:
var projected_extent = ProjectedExtent(Extent(0, 0, 0, 0), CRS.fromName("EPSG:3857"))
var num_cols_rows: (Int, Int) = (0, 0)
//var cellT: CellType = CellType.fromName("uint8raw")
var grid0: RDD[(Long, Double)] = sc.emptyRDD
//Single band GeoTiff
val filepath = "hdfs:///user/hadoop/spring-index/LastFreeze/1980.tif"
// Since the input is a single GeoTiff, the result is an RDD containing a single tile.
val tiles_RDD = sc.hadoopGeoTiffRDD(filepath).values
val grids_RDD = tiles_RDD.map(m => m.toArrayDouble())
val extents_withIndex = sc.hadoopGeoTiffRDD(filepath).keys.zipWithIndex().map{case (e,v) => (v,e)}
projected_extent = (extents_withIndex.filter(m => m._1 == 0).values.collect())(0)
val tiles_withIndex = tiles_RDD.zipWithIndex().map{case (e,v) => (v,e)}
val tile0 = (tiles_withIndex.filter(m => m._1==0).values.collect())(0)
num_cols_rows = (tile0.cols,tile0.rows)
val cellT = tile0.cellType
val bands_withIndex = grids_RDD.zipWithIndex().map { case (e, v) => (v, e) }
grid0 = bands_withIndex.filter(m => m._1 == 0).values.flatMap( m => m).zipWithIndex.map{case (v,i) => (i,v)}
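// Note: sc.hadoopGeoTiffRDD(filepath) is evaluated twice above (once for the tiles,
// once for the extents). A sketch of a single cached read serving both
// (geotiff_RDD is a hypothetical name, not defined elsewhere in this notebook):
//   val geotiff_RDD = sc.hadoopGeoTiffRDD(filepath).cache()
//   val tiles_RDD = geotiff_RDD.values
//   val extents_withIndex = geotiff_RDD.keys.zipWithIndex().map { case (e, i) => (i, e) }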
Out[40]:
In [42]:
// Rebuild a DoubleArrayTile from the flattened grid and copy it cell by cell
// into an empty tile that keeps the original cell type.
val clone_tile = DoubleArrayTile(grid0.values.collect(), num_cols_rows._1, num_cols_rows._2)
val cloned = geotrellis.raster.ArrayTile.empty(cellT, num_cols_rows._1, num_cols_rows._2)
cfor(0)(_ < num_cols_rows._1, _ + 1) { col =>
  cfor(0)(_ < num_cols_rows._2, _ + 1) { row =>
    val v = clone_tile.getDouble(col, row)
    cloned.setDouble(col, row, v)
  }
}
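// A sketch of an equivalent one-liner, assuming GeoTrellis' Tile.convert, which
// copies the values into a new tile of the target cell type:
//   val cloned = clone_tile.convert(cellT)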
val geoTif = new SinglebandGeoTiff(cloned, projected_extent.extent, projected_extent.crs, Tags.empty, GeoTiffOptions.DEFAULT)
Out[42]:
In [43]:
// GeoTiffWriter writes to a local path, so the file is written to /tmp first
// and then copied into HDFS in the next cell.
val output = "/user/pheno/spring-index/LastFreeze/1980_clone.tif"
val tmp_output = "/tmp/1980_clone.tif"
GeoTiffWriter.write(geoTif, tmp_output)
Out[43]:
In [45]:
// Copy the GeoTiff from the local filesystem into HDFS, overwriting any existing file.
val cmd = "hdfs dfs -copyFromLocal -f " + tmp_output + " " + output
Process(cmd).!
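// A sketch of the same copy through the Hadoop FileSystem API instead of the CLI,
// assuming the default filesystem in sc.hadoopConfiguration is HDFS:
//   import org.apache.hadoop.fs.{FileSystem, Path}
//   val fs = FileSystem.get(sc.hadoopConfiguration)
//   fs.copyFromLocalFile(false, true, new Path(tmp_output), new Path(output))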
Out[45]: