import com.here.platform.data.client.spark.LayerDataFrameReader.SparkSessionExt
import org.apache.spark.sql.SparkSession
// Obtain an updater for the target layer and run it in interactive-map
// "extension" context (NOTE(review): presumably so deletes apply to the
// extension layer rather than the base map — confirm against connector docs).
val layerUpdater = sparkSession
  .updateLayer(catalogHrn, layerId)
layerUpdater.option("olp.connector.interactive-map-context", "EXTENSION")

// Delete the objects in batches, issuing one RSQL query per batch of the
// form "mt_id=in=(id1,id2,...)". Batching keeps each query string small;
// `grouped` replaces the original manual iterator/StringBuilder loop and
// naturally handles the final partial batch and an empty `oidList`.
val deleteBatchSize = 500
var total: Long = 0
oidList.grouped(deleteBatchSize).foreach { batch =>
  total += batch.size
  log.info("Removing batch of " + batch.size + " objects. (" + total + " total elapsed)")
  // mkString(start, sep, end) builds "mt_id=in=(a,b,c)" with no trailing comma.
  layerUpdater.delete(batch.mkString("mt_id=in=(", ",", ")"))
}
import com.here.hrn.HRN;
import com.here.platform.data.client.spark.LayerUpdater;
import com.here.platform.data.client.spark.javadsl.JavaLayerUpdater;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.spark.sql.SparkSession;
// Obtain an updater for the target layer and run it in interactive-map
// "extension" context (NOTE(review): presumably so deletes apply to the
// extension layer rather than the base map — confirm against connector docs).
LayerUpdater layerUpdater =
    JavaLayerUpdater.create(sparkSession).updateLayer(catalogHrn, layerId);
layerUpdater.option("olp.connector.interactive-map-context", "EXTENSION");

// Delete the objects in batches, issuing one RSQL query per batch of the
// form "mt_id=in=(id1,id2,...)". The batch size is named (matching the
// Scala variant's deleteBatchSize) instead of the previous magic 500, and
// the final partial batch is now logged like every other batch.
final int deleteBatchSize = 500;
final String queryStart = "mt_id=in=(";
long total = 0;
List<String> batch = new ArrayList<>(deleteBatchSize);
Iterator<String> it = oidList.iterator();
while (it.hasNext()) {
  batch.add(it.next());
  // Flush when the batch is full, or when this was the last id (partial batch).
  if (batch.size() >= deleteBatchSize || !it.hasNext()) {
    total += batch.size();
    log.info("Removing batch of " + batch.size() + " objects. (" + total + " total elapsed)");
    // String.join produces "a,b,c" with no trailing comma, replacing the
    // manual StringBuilder comma bookkeeping.
    layerUpdater.delete(queryStart + String.join(",", batch) + ")");
    batch.clear();
  }
}