21 changes: 18 additions & 3 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -1052,10 +1052,25 @@ public static Path toTaskTempPath(Path orig) {
  }

  public static Path toTempPath(Path orig) {
-    if (orig.getName().indexOf(tmpPrefix) == 0) {
+    return toTempPath(orig, tmpPrefix);
+  }
+
+  private static Path toTempPath(Path orig, String prefix) {
+    if (orig.getName().indexOf(prefix) == 0) {
      return orig;
    }
-    return new Path(orig.getParent(), tmpPrefix + orig.getName());
+    return new Path(orig.getParent(), prefix + orig.getName());
  }

+  /**
+   * Converts a path into a temporary path for the direct insert manifest files.
+   * It is important to use a prefix which starts with '_', like '_tmp.', so that the content of this
+   * directory gets filtered out by AcidUtils.acidHiddenFileFilter.
+   * @param orig the original path as a String
+   * @return the path whose final component is prefixed with the manifest temp prefix
+   */
+  public static Path toManifestDirTempPath(String orig) {
+    return toTempPath(new Path(orig), hadoopTmpPrefix);
+  }

/**
@@ -4564,7 +4579,7 @@ private static Path getManifestDir(Path specPath, long writeId, int stmtId, Stri
if (isDelete) {
deltaDir = AcidUtils.deleteDeltaSubdir(writeId, writeId, stmtId);
}
-    Path manifestPath = new Path(manifestRoot, Utilities.toTempPath(deltaDir));
+    Path manifestPath = new Path(manifestRoot, Utilities.toManifestDirTempPath(deltaDir));

if (isInsertOverwrite) {
// When doing a multi-statement insert overwrite query with dynamic partitioning, the
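For reference, a minimal sketch of how the new helper is expected to behave (not part of the patch). It assumes hadoopTmpPrefix resolves to the "_tmp." prefix that the new test below asserts, and the class name is only illustrative:

// Illustrative sketch; needs hive-exec and hadoop-common on the classpath.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.AcidUtils;

public class ManifestTempPathSketch {
  public static void main(String[] args) {
    // A delta directory name is turned into a hidden temp directory name, assuming the
    // prefix is "_tmp.": "delta_0000002_0000002_0000" -> "_tmp.delta_0000002_0000002_0000".
    Path manifestDir = Utilities.toManifestDirTempPath("delta_0000002_0000002_0000");
    System.out.println(manifestDir.getName());

    // Because the name starts with '_', the ACID hidden-file filter is expected to reject it,
    // so the manifest directory never shows up as table data in directory listings.
    System.out.println(AcidUtils.acidHiddenFileFilter.accept(manifestDir)); // expected: false
  }
}

Keeping the manifest directory behind an underscore-prefixed name is what lets readers that rely on AcidUtils.acidHiddenFileFilter skip the manifest files entirely.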
34 changes: 34 additions & 0 deletions ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -51,7 +51,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -836,6 +838,38 @@ public void testSetPermissionsOnExistingDir() throws IOException {
Assert.assertEquals((short) 0777, fs.getFileStatus(path).getPermission().toShort());
}

+  @Test
+  public void testWritingManifestFile() throws HiveException, IOException {
+    String testTableName = "testWritingManifest";
+    JobConf jobConf = new JobConf();
+    FileSystem fs = FileSystem.getLocal(jobConf);
+    Path testTablePath = new Path(HiveConf.getVar(jobConf, HiveConf.ConfVars.LOCAL_SCRATCH_DIR) + "/" + testTableName);
+    try {
+      fs.mkdirs(testTablePath);
+      List<Path> commitPaths = new ArrayList<>();
+      commitPaths.add(new Path(testTableName + "/delta00001_00001/00000_0"));
+      Utilities.writeCommitManifest(commitPaths, testTablePath, fs,
+          "00001", 2L, 0, null, false,
+          false, null, null, false);
+
+      RemoteIterator<LocatedFileStatus> it = fs.listFiles(testTablePath, true);
+      List<Path> resultPaths = new ArrayList<>();
+      while (it.hasNext()) {
+        resultPaths.add(it.next().getPath());
+      }
+      assertEquals(1, resultPaths.size());
+      Path resultPath = resultPaths.get(0);
+      assertEquals("00001.manifest", resultPath.getName());
+      assertEquals("_tmp.delta_0000002_0000002_0000", resultPath.getParent().getName());
+      FileStatus[] files = fs.listStatus(testTablePath, AcidUtils.acidHiddenFileFilter);
+      assertEquals(0, files.length);
+    } finally {
+      if (fs.exists(testTablePath)) {
+        fs.delete(testTablePath, true);
+      }
+    }
+  }

private FileStatus[] generateTestNotEmptyFileStatuses(String... fileNames) {
return generateTestNotEmptyFileStatuses(null, fileNames);
}
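Assuming a standard Hive Maven build, the new test should be runnable on its own from the ql module with Surefire's test filter, for example: mvn test -pl ql -Dtest=TestUtilities#testWritingManifestFile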