diff --git a/.travis.yml b/.travis.yml
index 2a0a05b..3af13ce 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -24,6 +24,9 @@ script: |
mvn clean verify --settings maven-settings.xml
fi
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
+
# Secure envs are OSSRH_JIRA_USERNAME, OSSRH_JIRA_PASSWORD, GPG_KEY_NAME, GPG_PASSPHRASE
env:
global:
diff --git a/pom.xml b/pom.xml
index 0109b89..828ab63 100644
--- a/pom.xml
+++ b/pom.xml
@@ -135,6 +135,26 @@
+
+
+ org.jacoco
+ jacoco-maven-plugin
+ 0.7.9
+
+
+
+ prepare-agent
+
+
+
+ report
+ test
+
+ report
+
+
+
+
@@ -143,10 +163,18 @@
+
+
+ io.rest-assured
+ rest-assured
+ 3.0.3
+ test
+
+
junit
junit
- 3.8.1
+ 4.12
test
@@ -155,6 +183,12 @@
trove4j
3.0.3
+
+
+ org.hamcrest
+ java-hamcrest
+ 2.0.0.0
+
com.beust
jcommander
diff --git a/src/main/java/com/conveyal/osmlib/PostgresOSMSource.java b/src/main/java/com/conveyal/osmlib/PostgresOSMSource.java
index 946e1ad..4fac5cc 100644
--- a/src/main/java/com/conveyal/osmlib/PostgresOSMSource.java
+++ b/src/main/java/com/conveyal/osmlib/PostgresOSMSource.java
@@ -1,26 +1,16 @@
package com.conveyal.osmlib;
-import ch.qos.logback.core.db.ConnectionSource;
-import ch.qos.logback.core.recovery.ResilientFileOutputStream;
-import com.google.common.primitives.Longs;
-import gnu.trove.iterator.TLongIterator;
-import gnu.trove.list.TLongList;
-import gnu.trove.list.array.TLongArrayList;
import gnu.trove.set.TLongSet;
import gnu.trove.set.hash.TLongHashSet;
-import org.mapdb.Fun;
-import org.mapdb.Fun.Tuple3;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
-import java.awt.font.FontRenderContext;
-import java.io.IOException;
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
import java.util.Arrays;
-import java.util.NavigableSet;
-import java.util.Set;
-import java.util.stream.Collectors;
/**
* An OSM source that pulls OSM entities out of a Postgres database, within a geographic window.
@@ -94,7 +84,7 @@ private void processNodes () throws Exception {
// You can see the difference between the two, that the nodes extend outside the bounding box in version A.
// Adding the distinct keyword here lets the DB server do the filtering, but may be problematic on larger extracts.
final String sqlA = "select node_id, lat, lon, tags from" +
- "(select unnest(nodes) as node_id from ways " +
+ "(select unnest(node_ids) as node_id from ways " +
"where rep_lat > ? and rep_lat < ? and rep_lon > ? and rep_lon < ? and tags like '%highway=%')" +
"included_nodes join nodes using (node_id)";
PreparedStatement preparedStatement = connection.prepareStatement(sqlA);
@@ -124,7 +114,7 @@ private void processNodes () throws Exception {
}
private void processWays () throws Exception {
- final String sql = "select way_id, tags, nodes " +
+ final String sql = "select way_id, tags, node_ids " +
"from ways " +
"where rep_lat > ? and rep_lat < ? and rep_lon > ? and rep_lon < ? " +
"and tags like '%highway=%'";
diff --git a/src/main/java/com/conveyal/osmlib/PostgresSink.java b/src/main/java/com/conveyal/osmlib/PostgresSink.java
index bee075f..1cb8f1e 100644
--- a/src/main/java/com/conveyal/osmlib/PostgresSink.java
+++ b/src/main/java/com/conveyal/osmlib/PostgresSink.java
@@ -6,14 +6,12 @@
import org.slf4j.LoggerFactory;
import java.io.*;
-import java.nio.ByteBuffer;
-import java.nio.CharBuffer;
-import java.nio.charset.*;
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
import java.util.Arrays;
-import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import java.util.stream.Collector;
import java.util.stream.Collectors;
/**
@@ -57,7 +55,7 @@ public void writeBegin() throws IOException {
// We use column names node_id, way_id, relation_id instead of just id to facilitate joins.
statement.execute("create table updates (time timestamp, replication_epoch bigint, operation varchar)");
statement.execute("create table nodes (node_id bigint, lat float(9), lon float(9), tags varchar)");
- statement.execute("create table ways (way_id bigint, tags varchar, nodes bigint array)");
+ statement.execute("create table ways (way_id bigint, tags varchar, node_ids bigint array)");
statement.execute("create table relations (relation_id bigint, tags varchar)");
statement.execute("create table relation_members (relation_id bigint, type varchar, member_id bigint, role varchar)");
connection.commit();
@@ -246,7 +244,11 @@ public void writeEnd() throws IOException {
// statement.execute("create index on nodes(lat, lon)");
LOG.info("Assigning representative coordinates to ways...");
statement.execute("alter table ways add column rep_lat float(9), add column rep_lon float(9)");
- statement.execute("update ways set (rep_lat, rep_lon) = (select lat, lon from nodes where nodes.node_id = nodes[array_length(nodes, 1)/2])");
+ // use a subquery because a previous statement without a subquery was failing on travis for unknown reasons
+ statement.execute("update ways " +
+ "set rep_lat=subq.lat, rep_lon=subq.lon " +
+ "from (select lat, lon, node_id FROM nodes) as subq " +
+ "where subq.node_id = ways.node_ids[array_length(ways.node_ids, 1)/2]");
LOG.info("Indexing representative coordinates of ways...");
statement.execute("create index on ways(rep_lat, rep_lon)");
connection.commit();
diff --git a/src/main/java/com/conveyal/osmlib/VanillaExtract.java b/src/main/java/com/conveyal/osmlib/VanillaExtract.java
index e286da6..1f42cf7 100644
--- a/src/main/java/com/conveyal/osmlib/VanillaExtract.java
+++ b/src/main/java/com/conveyal/osmlib/VanillaExtract.java
@@ -6,16 +6,11 @@
import org.apache.commons.dbcp2.PoolingDataSource;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.glassfish.grizzly.http.Method;
-import org.glassfish.grizzly.http.server.HttpHandler;
-import org.glassfish.grizzly.http.server.HttpServer;
-import org.glassfish.grizzly.http.server.NetworkListener;
-import org.glassfish.grizzly.http.server.Request;
-import org.glassfish.grizzly.http.server.Response;
+import org.glassfish.grizzly.http.server.*;
import org.glassfish.grizzly.http.util.HttpStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.sql.ConnectionPoolDataSource;
import javax.sql.DataSource;
import java.io.IOException;
import java.io.OutputStream;
@@ -60,7 +55,19 @@ public static void main(String[] args) {
//
// Thread updateThread = Updater.spawnUpdateThread(osm);
- DataSource dataSource = createDataSource(args[0], null, null);
+ HttpServer httpServer = startServer(args[0]);
+ if (httpServer.isStarted()) {
+ try {
+ Thread.currentThread().join();
+ } catch (InterruptedException ie) {
+ LOG.info("Interrupted, shutting down.");
+ }
+ }
+ httpServer.shutdown();
+ }
+
+ public static HttpServer startServer(String jdbcUrl) {
+ DataSource dataSource = createDataSource(jdbcUrl, null, null);
LOG.info("Starting VEX HTTP server on port {} of interface {}", PORT, BIND_ADDRESS);
HttpServer httpServer = new HttpServer();
@@ -70,17 +77,12 @@ public static void main(String[] args) {
httpServer.getServerConfiguration().addHttpHandler(new VexHttpHandler(dataSource), "/*");
try {
httpServer.start();
- LOG.info("VEX server running.");
- Thread.currentThread().join();
-// updateThread.interrupt();
} catch (BindException be) {
LOG.error("Cannot bind to port {}. Is it already in use?", PORT);
} catch (IOException ioe) {
LOG.error("IO exception while starting server.");
- } catch (InterruptedException ie) {
- LOG.info("Interrupted, shutting down.");
}
- httpServer.shutdown();
+ return httpServer;
}
// Planet files are named planet-150504.osm.pbf (YYMMDD format)
@@ -113,7 +115,6 @@ public void service(Request request, Response response) throws Exception {
response.setContentType("application/osm");
String uri = request.getDecodedRequestURI();
int suffixIndex = uri.lastIndexOf('.');
- String fileType = uri.substring(suffixIndex);
OutputStream outStream = response.getOutputStream();
try {
String[] coords = uri.substring(1, suffixIndex).split("[,;]");
diff --git a/src/test/java/com/conveyal/osmlib/PostgresTest.java b/src/test/java/com/conveyal/osmlib/PostgresTest.java
new file mode 100644
index 0000000..4e74ebb
--- /dev/null
+++ b/src/test/java/com/conveyal/osmlib/PostgresTest.java
@@ -0,0 +1,75 @@
+package com.conveyal.osmlib;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.sql.*;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+
+/**
+ * Tests that verify osm data can be saved and queried from postgres
+ */
+public class PostgresTest {
+
+ /**
+ * Test that this library can load data into postgres and that the count of records matches expectations
+ */
+ @Test
+ public void canLoadFromFileIntoDatabase() throws Exception {
+ String newDBName = TestUtils.generateNewDB();
+ if (newDBName == null) {
+ throw new Exception("failed to generate test db");
+ }
+ try {
+ String jdbcUrl = "jdbc:postgresql://localhost/" + newDBName;
+ String[] args = {
+ "./src/test/resources/bangor_maine.osm.pbf",
+ jdbcUrl
+ };
+
+ // perform file to postgres load
+ PostgresSink.main(args);
+
+ // verify that data was loaded into postgres
+ try {
+ Connection connection = DriverManager.getConnection(jdbcUrl);
+ TableTestCase[] tables = {
+ new TableTestCase("nodes", 35747),
+ new TableTestCase("relation_members", 435),
+ new TableTestCase("relations", 34),
+ new TableTestCase("updates", 0),
+ new TableTestCase("ways", 2976)
+ };
+
+ for (TableTestCase table : tables) {
+ PreparedStatement preparedStatement = connection.prepareStatement("Select count(*) from " + table.tableName);
+ preparedStatement.execute();
+ ResultSet resultSet = preparedStatement.getResultSet();
+ resultSet.next();
+ int numNodes = resultSet.getInt(1);
+ assertThat(numNodes, is(table.expectedNumNodes));
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ Assert.fail();
+ }
+ } finally {
+ TestUtils.dropDB(newDBName);
+ }
+ }
+}
+
+/**
+ * Helper class holding an expected row count for a table, used to verify that the proper number of records got loaded into a particular table.
+ */
+class TableTestCase {
+ String tableName;
+ int expectedNumNodes;
+
+ public TableTestCase(String tableName, int expectedNumNodes) {
+ this.tableName = tableName;
+ this.expectedNumNodes = expectedNumNodes;
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/com/conveyal/osmlib/TestUtils.java b/src/test/java/com/conveyal/osmlib/TestUtils.java
new file mode 100644
index 0000000..ecfab5d
--- /dev/null
+++ b/src/test/java/com/conveyal/osmlib/TestUtils.java
@@ -0,0 +1,97 @@
+package com.conveyal.osmlib;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class TestUtils {
+
+ private static final Logger LOG = LoggerFactory.getLogger(TestUtils.class);
+ private static final AtomicInteger UNIQUE_ID = new AtomicInteger(0);
+ private static String pgUrl = "jdbc:postgresql://localhost/postgres";
+
+ /**
+ * Forcefully drops a database even if other users are connected to it.
+ *
+     * @param dbName name of the database to drop
+ */
+ public static void dropDB(String dbName) {
+ // first, terminate all other user sessions
+ executeAndClose("SELECT pg_terminate_backend(pg_stat_activity.pid) " +
+ "FROM pg_stat_activity " +
+ "WHERE pg_stat_activity.datname = '" + dbName + "' " +
+ "AND pid <> pg_backend_pid()");
+ // drop the db
+ executeAndClose("DROP DATABASE " + dbName);
+ }
+
+ /**
+ * Boilerplate for opening a connection, executing a statement and closing connection.
+ *
+     * @param statement the SQL statement to execute
+ * @return true if everything worked.
+ */
+ private static boolean executeAndClose(String statement) {
+ Connection connection;
+ try {
+ connection = DriverManager.getConnection(pgUrl);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ LOG.error("Error connecting to the database!");
+ return false;
+ }
+
+ try {
+ connection.prepareStatement(statement).execute();
+ } catch (SQLException e) {
+ e.printStackTrace();
+            LOG.error("Error executing sql statement!");
+ return false;
+ }
+
+ try {
+ connection.close();
+ return true;
+ } catch (SQLException e) {
+ e.printStackTrace();
+ LOG.error("Error closing connection!");
+ return false;
+ }
+ }
+
+ /**
+ * Generate a new database for isolating a test.
+ *
+     * @return The name of the new database, or null if creation was unsuccessful
+ */
+ public static String generateNewDB() {
+ String newDBName = uniqueString();
+ if (executeAndClose("CREATE DATABASE " + newDBName)) {
+ return newDBName;
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Helper to return the relative path to a test resource file
+ *
+     * @param fileName base name of the file under src/test/resources
+     * @return the relative path to the test resource file
+ */
+ public static String getResourceFileName(String fileName) {
+ return "./src/test/resources/" + fileName;
+ }
+
+ /**
+ * Generate a unique string. Mostly copied from the uniqueId method of https://github.com/javadev/underscore-java
+ */
+ public static String uniqueString() {
+ return "test_db_" + UNIQUE_ID.incrementAndGet();
+ }
+}
+
diff --git a/src/test/java/com/conveyal/osmlib/VanillaExtractTest.java b/src/test/java/com/conveyal/osmlib/VanillaExtractTest.java
new file mode 100644
index 0000000..55c4b5a
--- /dev/null
+++ b/src/test/java/com/conveyal/osmlib/VanillaExtractTest.java
@@ -0,0 +1,81 @@
+package com.conveyal.osmlib;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static com.conveyal.osmlib.TestUtils.dropDB;
+import static com.conveyal.osmlib.TestUtils.generateNewDB;
+import static io.restassured.RestAssured.given;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.*;
+
+/**
+ * Suite of tests to test the api of the VanillaExtract server
+ */
+public class VanillaExtractTest {
+ static final String TEST_FILE = "./src/test/resources/porto_portugal.osm.pbf";
+
+ private static String testDBName;
+
+ /**
+ * Drops the database made for this suite of tests
+ */
+ @AfterClass
+ public static void tearDown() {
+ dropDB(testDBName);
+ }
+
+ /**
+ * Creates a new database, loads in some osm data into a Postgres Sink and then starts the Vanilla Extract server
+ */
+ @BeforeClass
+ public static void setUp() throws Exception {
+ testDBName = generateNewDB();
+ if (testDBName == null) {
+ throw new Exception("failed to setup test db");
+ }
+
+ String jdbcUrl = "jdbc:postgresql://localhost/" + testDBName;
+ String[] postgresSinkArgs = {
+ "./src/test/resources/bangor_maine.osm.pbf",
+ jdbcUrl
+ };
+
+ // perform file to postgres load
+ PostgresSink.main(postgresSinkArgs);
+ VanillaExtract.startServer(jdbcUrl);
+ }
+
+ /**
+ * Make sure the server is replying with a helpful message if an improper path is supplied.
+ */
+ @Test
+ public void failsOnBadCoordinates() {
+ String response = given().port(9002).get("/gimme-my-data.pdf").asString();
+
+ // assert that response has expected error message
+ assertThat(response, containsString("URI format"));
+ }
+
+ /**
+ * Ensures that the server returns a valid extract of data.
+ */
+ @Test
+ public void getsExtract() {
+ String response = given().port(9002).get("/44.801884,-68.782802,44.805081,-68.779181.pbf").asString();
+
+ // assert that the response is not empty
+ assertThat(response.length(), greaterThan(0));
+
+ // assert that the response is not an error response
+ assertThat(response, not(containsString("URI format")));
+
+ // assert that the response is a valid pbf with osm data
+ OSM test = new OSM(null);
+ test.readFromUrl("http://localhost:9002/44.801884,-68.782802,44.805081,-68.779181.pbf");
+ assertThat(test.nodes.size(), equalTo(37));
+ assertThat(test.relations.size(), equalTo(0));
+ assertThat(test.ways.size(), equalTo(5));
+ }
+}