Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions .travis/hbase-install.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#! /bin/bash

if [ ! -f $HOME/downloads/hbase-1.3.0-bin.tar.gz ]; then sudo wget -O $HOME/downloads/hbase-1.3.0-bin.tar.gz http://www-eu.apache.org/dist/hbase/1.3.0/hbase-1.3.0-bin.tar.gz; fi
sudo mv $HOME/downloads/hbase-1.3.0-bin.tar.gz hbase-1.3.0-bin.tar.gz && tar xzf hbase-1.3.0-bin.tar.gz
sudo rm -f hbase-1.3.0/conf/hbase-site.xml && sudo mv .travis/hbase/hbase-site.xml hbase-1.3.0/conf
sudo hbase-1.3.0/bin/start-hbase.sh
if [ ! -f $HOME/downloads/hbase-1.3.1-bin.tar.gz ]; then sudo wget -O $HOME/downloads/hbase-1.3.1-bin.tar.gz http://www-eu.apache.org/dist/hbase/1.3.1/hbase-1.3.1-bin.tar.gz; fi
sudo mv $HOME/downloads/hbase-1.3.1-bin.tar.gz hbase-1.3.1-bin.tar.gz && tar xzf hbase-1.3.1-bin.tar.gz
sudo rm -f hbase-1.3.1/conf/hbase-site.xml && sudo mv .travis/hbase/hbase-site.xml hbase-1.3.1/conf
sudo hbase-1.3.1/bin/start-hbase.sh
2 changes: 1 addition & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ lazy val commonSettings = Seq(

credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),

addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.3" cross CrossVersion.binary),
addCompilerPlugin("org.spire-math" % "kind-projector" % "0.9.4" cross CrossVersion.binary),
addCompilerPlugin("org.scalamacros" %% "paradise" % "2.1.0" cross CrossVersion.full),

pomExtra := (
Expand Down
14 changes: 7 additions & 7 deletions project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ import sbt._
object Dependencies {
val typesafeConfig = "com.typesafe" % "config" % "1.3.1"
val logging = "com.typesafe.scala-logging" %% "scala-logging" % "3.5.0"
val scalatest = "org.scalatest" %% "scalatest" % "3.0.1"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.13.4"
val scalatest = "org.scalatest" %% "scalatest" % "3.0.3"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.13.5"
val jts = "com.vividsolutions" % "jts-core" % "1.14.0"

val monocleCore = "com.github.julien-truffaut" %% "monocle-core" % Version.monocle
Expand All @@ -34,20 +34,20 @@ object Dependencies {

val apacheMath = "org.apache.commons" % "commons-math3" % "3.6.1"

val chronoscala = "jp.ne.opt" %% "chronoscala" % "0.1.2"
val chronoscala = "jp.ne.opt" %% "chronoscala" % "0.1.3"

val awsSdkS3 = "com.amazonaws" % "aws-java-sdk-s3" % "1.11.92"
val awsSdkS3 = "com.amazonaws" % "aws-java-sdk-s3" % "1.11.143"

val scalazStream = "org.scalaz.stream" %% "scalaz-stream" % "0.8.6a"

val sparkCore = "org.apache.spark" %% "spark-core" % Version.spark
val hadoopClient = "org.apache.hadoop" % "hadoop-client" % Version.hadoop

val avro = "org.apache.avro" % "avro" % "1.8.1"
val avro = "org.apache.avro" % "avro" % "1.8.2"

val slickPG = "com.github.tminglei" %% "slick-pg" % "0.14.6"
val slickPG = "com.github.tminglei" %% "slick-pg" % "0.15.0"

val parserCombinators = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.5"
val parserCombinators = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.6"

val jsonSchemaValidator = "com.networknt" % "json-schema-validator" % "0.1.7"
}
4 changes: 2 additions & 2 deletions project/Environment.scala
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ object Environment {
def either(environmentVariable: String, default: String): String =
Properties.envOrElse(environmentVariable, default)

lazy val hadoopVersion = either("SPARK_HADOOP_VERSION", "2.7.3")
lazy val sparkVersion = either("SPARK_VERSION", "2.1.0")
lazy val hadoopVersion = either("SPARK_HADOOP_VERSION", "2.8.0")
lazy val sparkVersion = either("SPARK_VERSION", "2.1.1")
lazy val versionSuffix = either("GEOTRELLIS_VERSION_SUFFIX", "-SNAPSHOT")
}
10 changes: 5 additions & 5 deletions project/Version.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,13 @@
object Version {
val geotrellis = "1.2.0" + Environment.versionSuffix
val scala = "2.11.11"
val geotools = "16.1"
val geotools = "17.1"
val sprayJson = "1.3.3"
val monocle = "1.4.0"
val accumulo = "1.7.2"
val cassandra = "3.1.4"
val hbase = "1.3.0"
val geomesa = "1.2.7.3"
val accumulo = "1.7.3"
val cassandra = "3.2.0"
val hbase = "1.3.1"
val geomesa = "1.2.8"
lazy val hadoop = Environment.hadoopVersion
lazy val spark = Environment.sparkVersion
}
2 changes: 1 addition & 1 deletion spark/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ name := "geotrellis-spark"
libraryDependencies ++= Seq(
sparkCore % "provided",
hadoopClient % "provided",
"com.google.uzaygezen" % "uzaygezen-core" % "0.2",
"org.locationtech.sfcurve" %% "sfcurve-geowave-index" % "0.2.1-SNAPSHOT",
"org.scalaj" %% "scalaj-http" % "2.3.0",
avro,
spire,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
/*
* Copyright 2017 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package geotrellis.spark.io.index

import geotrellis.spark._
import geotrellis.spark.io.index.geowave._

/** Marker trait serving as the source type for the implicit conversion below
  * (mirrors the KeyIndexMethod companion pattern used elsewhere in this package).
  */
private[index] trait GeowaveKeyIndexMethod

object GeowaveKeyIndexMethod extends GeowaveKeyIndexMethod {

/** Convert the marker object into a [[KeyIndexMethod]] for [[SpatialKey]]s
  * backed by the GeoWave tiered space-filling-curve index.
  *
  * NOTE(review): `resolution` is not defined in this file — presumably a helper
  * brought into scope via `geotrellis.spark._` that computes the number of bits
  * needed to span the given max/min extent; confirm its contract at the caller.
  */
implicit def spatialKeyIndexMethod(m: GeowaveKeyIndexMethod): KeyIndexMethod[SpatialKey] =
new KeyIndexMethod[SpatialKey] {
def createIndex(keyBounds: KeyBounds[SpatialKey]): KeyIndex[SpatialKey] = {
// Bits of resolution required along each axis for these key bounds.
val xBits = resolution(keyBounds.maxKey.col, keyBounds.minKey.col)
val yBits = resolution(keyBounds.maxKey.row, keyBounds.minKey.row)
new GeowaveSpatialKeyIndex(keyBounds, xBits, yBits)
}
}

}
32 changes: 16 additions & 16 deletions spark/src/main/scala/geotrellis/spark/io/index/MergeQueue.scala
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,9 @@ object MergeQueue{
class MergeQueue(initialSize: Int = 1) {
private var array = if(initialSize <= 1) { Array.ofDim[(Long, Long)](1) } else { Array.ofDim[(Long, Long)](initialSize) }
private var _size = 0

def size = _size

private def removeElement(i: Int): Unit = {
if(i < _size - 1) {
val result = array.clone
Expand All @@ -39,7 +39,7 @@ class MergeQueue(initialSize: Int = 1) {
}
_size = _size - 1
}

private def insertElement(range: (Long, Long), i: Int): Unit = {
ensureSize(_size + 1)
if(i == _size) {
Expand All @@ -53,8 +53,8 @@ class MergeQueue(initialSize: Int = 1) {
}
_size += 1
}


/** Ensure that the internal array has at least `n` cells. */
protected def ensureSize(n: Int) {
// Use a Long to prevent overflows
Expand All @@ -66,15 +66,15 @@ class MergeQueue(initialSize: Int = 1) {
}
// Clamp newSize to Int.MaxValue
if (newSize > Int.MaxValue) newSize = Int.MaxValue

val newArray: Array[(Long, Long)] = new Array(newSize.toInt)
scala.compat.Platform.arraycopy(array, 0, newArray, 0, _size)
array = newArray
}
}

val ordering = implicitly[Ordering[(Long, Long)]]

/** Inserts a single range into the priority queue.
*
* @param range the element to insert.
Expand All @@ -86,25 +86,25 @@ class MergeQueue(initialSize: Int = 1) {
val i = -(res + 1)
var (thisStart, thisEnd) = range
var removeLeft = false

var removeRight = false
var rightRemainder: Option[(Long, Long)] = None

// Look at the left range
if(i != 0) {
val (prevStart, prevEnd) = array(i - 1)
if(prevStart == thisStart) {
removeLeft = true
}
if (prevEnd + 1 >= thisStart) {
if (prevEnd + 1 >= thisStart) {
removeLeft = true
thisStart = prevStart
if(prevEnd > thisEnd) {
thisEnd = prevEnd
}
}
}

// Look at the right range
if(i < _size && _size > 0) {
val (nextStart, nextEnd) = array(i)
Expand All @@ -123,8 +123,8 @@ class MergeQueue(initialSize: Int = 1) {
}
}
}
if(removeRight) {

if(removeRight) {
if(!removeLeft) {
array(i) = (thisStart, thisEnd)
} else {
Expand All @@ -136,14 +136,14 @@ class MergeQueue(initialSize: Int = 1) {
} else {
insertElement(range, i)
}

rightRemainder match {
case Some(r) => this += r
case None =>
}
}
}

def toSeq: Seq[(Long, Long)] = {
val result = Array.ofDim[(Long, Long)](size)
System.arraycopy(array, 0, result, 0, size)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
/*
* Copyright 2017 Azavea
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package geotrellis.spark.io.index.geowave

import geotrellis.spark._
import geotrellis.spark.io.index.KeyIndex

import mil.nga.giat.geowave.core.index._
import mil.nga.giat.geowave.core.index.dimension._
import mil.nga.giat.geowave.core.index.sfc.data.{ BasicNumericDataset, NumericRange }
import mil.nga.giat.geowave.core.index.sfc.SFCDimensionDefinition
import mil.nga.giat.geowave.core.index.sfc.SFCFactory.SFCType
import mil.nga.giat.geowave.core.index.sfc.tiered.{ TieredSFCIndexFactory, TieredSFCIndexStrategy }

import scala.collection.JavaConverters._


object GeowaveSpatialKeyIndex {
  /** Build an index over the bounds given by `minKey`/`maxKey`, using the same
    * bit resolution on both spatial axes.
    */
  def apply(minKey: SpatialKey, maxKey: SpatialKey, spatialResolution: Int): GeowaveSpatialKeyIndex =
    new GeowaveSpatialKeyIndex(new KeyBounds(minKey, maxKey), spatialResolution, spatialResolution)

  /** Build an index over `keyBounds`, using the same bit resolution on both axes. */
  def apply(keyBounds: KeyBounds[SpatialKey], spatialResolution: Int): GeowaveSpatialKeyIndex =
    new GeowaveSpatialKeyIndex(keyBounds, spatialResolution, spatialResolution)

  /** Build an index over `keyBounds` with independent per-axis bit resolutions. */
  def apply(keyBounds: KeyBounds[SpatialKey], xResolution: Int, yResolution: Int): GeowaveSpatialKeyIndex =
    new GeowaveSpatialKeyIndex(keyBounds, xResolution, yResolution)
}

/**
* Class that provides spatial indexing using the GeoWave indexing
* machinery.
*
* @param keyBounds The bounds over-which the index is valid
* @param xResolution The number of bits of resolution requested/required by the x-axis
* @param yResolution The number of bits of resolution requested/required by the y-axis
* @author James McClain
*/
class GeowaveSpatialKeyIndex(val keyBounds: KeyBounds[SpatialKey], val xResolution: Int, val yResolution: Int) extends KeyIndex[SpatialKey] {

  /** Maximum number of ranges a query may be decomposed into by the SFC strategy. */
  val maxRangeDecomposition = 5000

  val KeyBounds(SpatialKey(minCol, minRow), SpatialKey(maxCol, maxRow)) = keyBounds
  // The GeoWave SFC machinery is rebuilt lazily (and marked @transient) so that
  // this index can be shipped to Spark executors without serializing it.
  @transient lazy val dim1 = new SFCDimensionDefinition(new BasicDimensionDefinition(minCol, maxCol), xResolution)
  @transient lazy val dim2 = new SFCDimensionDefinition(new BasicDimensionDefinition(minRow, maxRow), yResolution)
  @transient lazy val dimensions: Array[SFCDimensionDefinition] = Array(dim1, dim2)
  @transient lazy val strategy: TieredSFCIndexStrategy = TieredSFCIndexFactory.createSingleTierStrategy(dimensions, SFCType.HILBERT)

  /** Interpret the first (up to) eight bytes of `id` as an unsigned big-endian
    * integer. (Ids appear to be big-endian — TODO confirm against GeoWave docs.)
    *
    * FIX: the previous implementation accumulated `value.toLong`, which
    * sign-extends bytes with the high bit set (0xFF became -1 rather than 255),
    * corrupting the resulting Long and breaking the ordering of index ranges.
    * Masking with 0xFF treats each byte as unsigned, as intended.
    */
  private def idToLong(id: Array[Byte]): Long = {
    id.take(8).foldLeft(0L)({ (accumulator, value) => (accumulator << 8) | (value & 0xFFL) })
  }

  /** Compute the SFC index of a single key; ASSUMED to be used for insertion. */
  def toIndex(key: SpatialKey): Long = {

    val SpatialKey(col, row) = key
    // Degenerate (point) ranges covering exactly this key's cell.
    val range1 = new NumericRange(col, col) // XXX
    val range2 = new NumericRange(row, row) // XXX
    val multiRange = new BasicNumericDataset(Array(range1, range2))
    val insertionIds = strategy.getInsertionIds(multiRange)

    // A single-tier strategy over a point region should yield exactly one id.
    assert(insertionIds.size() == 1)
    idToLong(insertionIds.get(0).getBytes())
  }

  /** Decompose the rectangle spanned by `keyRange` into SFC index ranges;
    * ASSUMED to be used for queries. Endpoints are normalized so the range may
    * be given in either corner order.
    */
  def indexRanges(keyRange: (SpatialKey, SpatialKey)): Seq[(Long, Long)] = {
    val (SpatialKey(col1, row1), SpatialKey(col2, row2)) = keyRange
    val minCol = math.min(col1, col2)
    val maxCol = math.max(col1, col2)
    val minRow = math.min(row1, row2)
    val maxRow = math.max(row1, row2)

    val range1 = new NumericRange(minCol, maxCol) // XXX
    val range2 = new NumericRange(minRow, maxRow) // XXX
    val multiRange = new BasicNumericDataset(Array(range1, range2))
    val queryRanges = strategy.getQueryRanges(multiRange, maxRangeDecomposition)

    queryRanges
      .asScala
      .map({ range: ByteArrayRange =>
        val start = range.getStart()
        val end = range.getEnd()
        (idToLong(start.getBytes()), idToLong(end.getBytes()))
      })
  }
}
Loading