Skip to content

Commit dde3bc4

Browse files
committed
Merge branch 'dev' into feature/DT-530-additional-feed-source-summary-fields
2 parents 43756e8 + 6a69a22 commit dde3bc4

28 files changed

+417
-393
lines changed
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
package com.conveyal.datatools.common.utils;
2+
3+
import java.io.Closeable;
4+
import java.io.IOException;
5+
import java.net.HttpURLConnection;
6+
import java.net.URL;
7+
8+
/**
 * A wrapper around HttpURLConnection that implements Closeable so it can be used in try-with-resources blocks.
 */
public class CloseableHttpURLConnection implements Closeable {
    /** The wrapped connection; created eagerly by the constructor and never reassigned. */
    private final HttpURLConnection httpConnection;

    /**
     * Creates (but does not yet connect) an HTTP connection for the given URL.
     *
     * @param url the HTTP/HTTPS URL to open a connection to
     * @throws IOException if the underlying connection object cannot be created
     */
    public CloseableHttpURLConnection(URL url) throws IOException {
        httpConnection = (HttpURLConnection) url.openConnection();
    }

    /**
     * @return the underlying HttpURLConnection for configuring and performing the request
     */
    public HttpURLConnection getConnection() {
        return httpConnection;
    }

    /**
     * Releases resources held by the wrapped connection by disconnecting it.
     */
    @Override
    public void close() {
        // Defensive null guard retained even though the constructor always assigns the field.
        if (httpConnection != null) {
            httpConnection.disconnect();
        }
    }
}

src/main/java/com/conveyal/datatools/common/utils/SparkUtils.java

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -258,13 +258,12 @@ public static void logRequestOrResponse(
258258
* multipart form handling code also caches the request body.
259259
*/
260260
public static void copyRequestStreamIntoFile(Request req, File file) {
261-
try {
261+
try (
262262
ServletInputStream inputStream = ((ServletRequestWrapper) req.raw()).getRequest().getInputStream();
263263
FileOutputStream fileOutputStream = new FileOutputStream(file);
264+
) {
264265
// Guava's ByteStreams.copy uses a 4k buffer (no need to wrap output stream), but does not close streams.
265266
ByteStreams.copy(inputStream, fileOutputStream);
266-
fileOutputStream.close();
267-
inputStream.close();
268267
if (file.length() == 0) {
269268
// Throw IO exception to be caught and returned to user via halt.
270269
throw new IOException("No file found in request body.");
@@ -299,10 +298,13 @@ public static String uploadMultipartRequestBodyToS3(Request req, String uploadTy
299298

300299
extension = "." + part.getContentType().split("/", 0)[1];
301300
tempFile = File.createTempFile(part.getName() + "_" + uploadType, extension);
302-
InputStream inputStream;
303-
inputStream = part.getInputStream();
304-
FileOutputStream out = new FileOutputStream(tempFile);
305-
IOUtils.copy(inputStream, out);
301+
302+
try (
303+
InputStream inputStream = part.getInputStream();
304+
FileOutputStream out = new FileOutputStream(tempFile)
305+
) {
306+
IOUtils.copy(inputStream, out);
307+
}
306308
} catch (IOException | ServletException e) {
307309
e.printStackTrace();
308310
logMessageAndHalt(req, 400, "Unable to read uploaded file");

src/main/java/com/conveyal/datatools/editor/jobs/ExportSnapshotToGTFSJob.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,8 +82,8 @@ public void jobLogic() {
8282
}
8383
LOG.info("Storing snapshot GTFS at {}", S3Utils.getDefaultBucketUriForKey(s3Key));
8484
} else {
85-
try {
86-
File gtfsFile = FeedVersion.feedStore.newFeed(filename, new FileInputStream(tempFile), null);
85+
try (FileInputStream fileInputStream = new FileInputStream(tempFile)) {
86+
File gtfsFile = FeedVersion.feedStore.newFeed(filename, fileInputStream, null);
8787
if (isNewVersion) feedVersion.assignGtfsFileAttributes(gtfsFile);
8888
} catch (IOException e) {
8989
status.fail(String.format("Could not store feed for snapshot %s", snapshot.id), e);

src/main/java/com/conveyal/datatools/editor/utils/ClassLoaderSerializer.java

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,20 @@ public class ClassLoaderSerializer implements Serializer<Object>, Serializable {
1919

2020
@Override
2121
public void serialize(DataOutput out, Object value) throws IOException {
22-
ObjectOutputStream out2 = new ObjectOutputStream((OutputStream) out);
23-
out2.writeObject(value);
24-
out2.flush();
22+
try (ObjectOutputStream out2 = new ObjectOutputStream((OutputStream) out)) {
23+
out2.writeObject(value);
24+
out2.flush();
25+
}
2526
}
2627

2728
@Override
2829
public Object deserialize(DataInput in, int available) throws IOException {
29-
try {
30-
ObjectInputStream in2 = new ClassLoaderObjectInputStream(Thread.currentThread().getContextClassLoader(), (InputStream) in);
30+
try (
31+
ObjectInputStream in2 = new ClassLoaderObjectInputStream(
32+
Thread.currentThread().getContextClassLoader(),
33+
(InputStream) in
34+
)
35+
) {
3136
return in2.readObject();
3237
} catch (ClassNotFoundException e) {
3338
throw new IOException(e);
Lines changed: 28 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
package com.conveyal.datatools.editor.utils;
22

3-
import java.io.Closeable;
43
import java.io.File;
54
import java.io.FileInputStream;
65
import java.io.FileOutputStream;
@@ -18,35 +17,34 @@ public class DirectoryZip {
1817

1918
public static void zip(File directory, File zipfile) throws IOException {
2019
URI base = directory.toURI();
21-
Deque<File> queue = new LinkedList<File>();
20+
Deque<File> queue = new LinkedList<>();
2221
queue.push(directory);
23-
OutputStream out = new FileOutputStream(zipfile);
24-
Closeable res = out;
25-
try {
26-
ZipOutputStream zout = new ZipOutputStream(out);
27-
res = zout;
28-
while (!queue.isEmpty()) {
29-
directory = queue.pop();
30-
for (File kid : directory.listFiles()) {
31-
String name = base.relativize(kid.toURI()).getPath();
32-
if (kid.isDirectory()) {
33-
queue.push(kid);
34-
name = name.endsWith("/") ? name : name + "/";
35-
zout.putNextEntry(new ZipEntry(name));
36-
} else {
37-
zout.putNextEntry(new ZipEntry(name));
38-
copy(kid, zout);
39-
zout.closeEntry();
40-
}
22+
23+
try (
24+
FileOutputStream out = new FileOutputStream(zipfile);
25+
ZipOutputStream zout = new ZipOutputStream(out)
26+
) {
27+
while (!queue.isEmpty()) {
28+
directory = queue.pop();
29+
for (File kid : directory.listFiles()) {
30+
String name = base.relativize(kid.toURI()).getPath();
31+
if (kid.isDirectory()) {
32+
queue.push(kid);
33+
name = name.endsWith("/") ? name : name + "/";
34+
zout.putNextEntry(new ZipEntry(name));
35+
} else {
36+
zout.putNextEntry(new ZipEntry(name));
37+
copy(kid, zout);
38+
}
39+
// Explicitly close directory entry to ensure ZIP structure is valid; omitting this may corrupt the
40+
// archive.
41+
zout.closeEntry();
42+
}
4143
}
42-
}
43-
} finally {
44-
res.close();
4544
}
46-
}
45+
}
4746

48-
49-
private static void copy(InputStream in, OutputStream out) throws IOException {
47+
private static void copy(InputStream in, OutputStream out) throws IOException {
5048
byte[] buffer = new byte[1024];
5149
while (true) {
5250
int readCount = in.read(buffer);
@@ -57,22 +55,9 @@ private static void copy(InputStream in, OutputStream out) throws IOException {
5755
}
5856
}
5957

60-
private static void copy(File file, OutputStream out) throws IOException {
61-
InputStream in = new FileInputStream(file);
62-
try {
63-
copy(in, out);
64-
} finally {
65-
in.close();
58+
private static void copy(File file, OutputStream out) throws IOException {
59+
try (InputStream in = new FileInputStream(file)) {
60+
copy(in, out);
6661
}
67-
}
68-
69-
private static void copy(InputStream in, File file) throws IOException {
70-
OutputStream out = new FileOutputStream(file);
71-
try {
72-
copy(in, out);
73-
} finally {
74-
out.close();
75-
}
76-
}
77-
62+
}
7863
}

src/main/java/com/conveyal/datatools/manager/DataManager.java

Lines changed: 20 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -159,9 +159,10 @@ static void initializeApplication(String[] args, boolean initScheduledTasks) thr
159159
*/
160160
private static void loadProperties() {
161161
final Properties projectProperties = new Properties();
162-
InputStream projectPropertiesInputStream =
163-
DataManager.class.getClassLoader().getResourceAsStream(".properties");
164-
try {
162+
try (
163+
InputStream projectPropertiesInputStream =
164+
DataManager.class.getClassLoader().getResourceAsStream(".properties");
165+
) {
165166
projectProperties.load(projectPropertiesInputStream);
166167
repoUrl = projectProperties.getProperty("repo_url");
167168
} catch (IOException e) {
@@ -170,9 +171,10 @@ private static void loadProperties() {
170171
}
171172

172173
final Properties gitProperties = new Properties();
173-
try {
174+
try (
174175
InputStream gitPropertiesInputStream =
175176
DataManager.class.getClassLoader().getResourceAsStream("git.properties");
177+
) {
176178
gitProperties.load(gitPropertiesInputStream);
177179
commit = gitProperties.getProperty("git.commit.id");
178180
} catch (Exception e) {
@@ -478,24 +480,28 @@ private static void registerExternalResources() {
478480
* default configuration file locations. Config fields are retrieved with getConfigProperty.
479481
*/
480482
private static void loadConfig(String[] args) throws IOException {
481-
FileInputStream envConfigStream;
482-
FileInputStream serverConfigStream;
483+
File envFile;
484+
File serverFile;
483485

484486
if (args.length == 0) {
485487
LOG.warn("Using default env.yml: {}", DEFAULT_ENV);
486488
LOG.warn("Using default server.yml: {}", DEFAULT_CONFIG);
487-
envConfigStream = new FileInputStream(new File(DEFAULT_ENV));
488-
serverConfigStream = new FileInputStream(new File(DEFAULT_CONFIG));
489-
}
490-
else {
489+
envFile = new File(DEFAULT_ENV);
490+
serverFile = new File(DEFAULT_CONFIG);
491+
} else {
491492
LOG.info("Loading env.yml: {}", args[0]);
492493
LOG.info("Loading server.yml: {}", args[1]);
493-
envConfigStream = new FileInputStream(new File(args[0]));
494-
serverConfigStream = new FileInputStream(new File(args[1]));
494+
envFile = new File(args[0]);
495+
serverFile = new File(args[1]);
495496
}
496497

497-
envConfig = yamlMapper.readTree(envConfigStream);
498-
serverConfig = yamlMapper.readTree(serverConfigStream);
498+
try (
499+
FileInputStream envConfigStream = new FileInputStream(envFile);
500+
FileInputStream serverConfigStream = new FileInputStream(serverFile)
501+
) {
502+
envConfig = yamlMapper.readTree(envConfigStream);
503+
serverConfig = yamlMapper.readTree(serverConfigStream);
504+
}
499505
}
500506

501507
/**

src/main/java/com/conveyal/datatools/manager/controllers/api/DeploymentController.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -159,6 +159,7 @@ private static FileInputStream downloadDeployment (Request req, Response res) th
159159
File temp = File.createTempFile("deployment", ".zip");
160160
// just include GTFS, not any of the ancillary information
161161
deployment.dump(temp, false, false, false);
162+
// File input stream must be closed by calling method.
162163
FileInputStream fis = new FileInputStream(temp);
163164
String cleanName = deployment.name.replaceAll("[^a-zA-Z0-9]", "");
164165
res.type("application/zip");

src/main/java/com/conveyal/datatools/manager/controllers/api/GtfsPlusController.java

Lines changed: 19 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -100,10 +100,9 @@ private static HttpServletResponse getGtfsPlusFromGtfs(String feedVersionId, Req
100100
gtfsPlusTables.add(tableNode.get("name").asText());
101101
}
102102

103-
try {
103+
try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(gtfsPlusFile))) {
104104
// create a new zip file to only contain the GTFS+ tables
105105
gtfsPlusFile = File.createTempFile(version.id + "_gtfsplus", ".zip");
106-
ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(gtfsPlusFile));
107106

108107
// iterate through the existing GTFS file, copying any GTFS+ tables
109108
ZipFile gtfsFile = new ZipFile(version.retrieveGtfsFile());
@@ -116,15 +115,14 @@ private static HttpServletResponse getGtfsPlusFromGtfs(String feedVersionId, Req
116115
// create a new empty ZipEntry and copy the contents
117116
ZipEntry newEntry = new ZipEntry(entry.getName());
118117
zos.putNextEntry(newEntry);
119-
InputStream in = gtfsFile.getInputStream(entry);
120-
while (0 < in.available()){
121-
int read = in.read(buffer);
122-
zos.write(buffer,0,read);
118+
try (InputStream in = gtfsFile.getInputStream(entry)) {
119+
while (0 < in.available()) {
120+
int read = in.read(buffer);
121+
zos.write(buffer, 0, read);
122+
}
123123
}
124-
in.close();
125124
zos.closeEntry();
126125
}
127-
zos.close();
128126
} catch (IOException e) {
129127
logMessageAndHalt(req, 500, "An error occurred while trying to create a gtfs file", e);
130128
}
@@ -177,10 +175,9 @@ private static String publishGtfsPlusFile(Request req, Response res) {
177175

178176
File newFeed = null;
179177

180-
try {
178+
try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(newFeed))) {
181179
// First, create a new zip file to only contain the GTFS+ tables
182180
newFeed = File.createTempFile(feedVersionId + "_new", ".zip");
183-
ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(newFeed));
184181

185182
// Next, iterate through the existing GTFS file, copying all non-GTFS+ tables.
186183
ZipFile gtfsFile = new ZipFile(feedVersion.retrieveGtfsFile());
@@ -194,12 +191,12 @@ private static String publishGtfsPlusFile(Request req, Response res) {
194191
// create a new empty ZipEntry and copy the contents
195192
ZipEntry newEntry = new ZipEntry(entry.getName());
196193
zos.putNextEntry(newEntry);
197-
InputStream in = gtfsFile.getInputStream(entry);
198-
while (0 < in.available()){
199-
int read = in.read(buffer);
200-
zos.write(buffer,0,read);
194+
try (InputStream in = gtfsFile.getInputStream(entry)) {
195+
while (0 < in.available()) {
196+
int read = in.read(buffer);
197+
zos.write(buffer, 0, read);
198+
}
201199
}
202-
in.close();
203200
zos.closeEntry();
204201
}
205202

@@ -211,23 +208,22 @@ private static String publishGtfsPlusFile(Request req, Response res) {
211208

212209
ZipEntry newEntry = new ZipEntry(entry.getName());
213210
zos.putNextEntry(newEntry);
214-
InputStream in = plusZipFile.getInputStream(entry);
215-
while (0 < in.available()){
216-
int read = in.read(buffer);
217-
zos.write(buffer,0,read);
211+
try (InputStream in = plusZipFile.getInputStream(entry)) {
212+
while (0 < in.available()) {
213+
int read = in.read(buffer);
214+
zos.write(buffer, 0, read);
215+
}
218216
}
219-
in.close();
220217
zos.closeEntry();
221218
}
222-
zos.close();
223219
} catch (IOException e) {
224220
logMessageAndHalt(req, 500, "Error creating combined GTFS/GTFS+ file", e);
225221
}
226222
// Create a new feed version to represent the published GTFS+.
227223
FeedVersion newFeedVersion = new FeedVersion(feedVersion.parentFeedSource(), PRODUCED_IN_HOUSE_GTFS_PLUS);
228224
File newGtfsFile = null;
229-
try {
230-
newGtfsFile = newFeedVersion.newGtfsFile(new FileInputStream(newFeed));
225+
try (FileInputStream fis = new FileInputStream(newFeed)) {
226+
newGtfsFile = newFeedVersion.newGtfsFile(fis);
231227
} catch (IOException e) {
232228
e.printStackTrace();
233229
logMessageAndHalt(req, 500, "Error reading GTFS file input stream", e);

0 commit comments

Comments (0)