ISSUE-6 and webhdfs support
dstreev committed Aug 9, 2019
1 parent ab8f196 commit 7a09f73
Showing 12 changed files with 1,312 additions and 114 deletions.
2 changes: 1 addition & 1 deletion pom.xml
@@ -25,7 +25,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.streever.hadoop</groupId>
<artifactId>hadoop.cli</artifactId>
<version>2.0.16-SNAPSHOT</version>
<version>2.0.17-SNAPSHOT</version>
<name>Hadoop CLI</name>
<description>Hadoop Command Line Interface</description>
<properties>
2 changes: 1 addition & 1 deletion src/main/java/com/streever/hadoop/AbstractStats.java
@@ -159,7 +159,7 @@ public final CommandReturn execute(Environment environment, CommandLine cmd, Con

if (cmd.hasOption("output")) {
// Get a handle to the FileSystem if we intend to write our results to HDFS.
baseOutputDir = buildPath2(fs.getWorkingDirectory().toString().substring(((String) env.getProperties().getProperty(Constants.HDFS_URL)).length()), cmd.getOptionValue("output"));
baseOutputDir = pathBuilder.resolveFullPath(fs.getWorkingDirectory().toString().substring(((String) env.getProperties().getProperty(Constants.HDFS_URL)).length()), cmd.getOptionValue("output"));
} else {
baseOutputDir = null;
}
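For reference, resolveFullPath (introduced in PathBuilder below) leaves an absolute path untouched and anchors a relative one at the supplied working directory. A minimal sketch of the call this hunk switches to, with invented values for the working directory and the output option:

    // Hypothetical values; at runtime they come from the FileSystem and the parsed CommandLine.
    String workingDir = "/user/dstreev";      // HDFS working dir, HDFS_URL prefix already stripped
    String output     = "stats/out";          // value of the "output" option
    String baseOutputDir = PathBuilder.resolveFullPath(workingDir, output);
    // -> "/user/dstreev/stats/out"; an absolute value such as "/tmp/out" would be returned unchanged.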
1,104 changes: 1,104 additions & 0 deletions src/main/java/com/streever/hadoop/HadoopSession.java

Large diffs are not rendered by default.

5 changes: 5 additions & 0 deletions src/main/java/com/streever/hadoop/hdfs/shell/command/ContextSide.java
@@ -0,0 +1,5 @@
package com.streever.hadoop.hdfs.shell.command;

public enum ContextSide {
LEFT,RIGHT
}
101 changes: 10 additions & 91 deletions src/main/java/com/streever/hadoop/hdfs/shell/command/HdfsAbstract.java
@@ -27,6 +27,7 @@
import com.streever.tools.stemshell.command.AbstractCommand;
import jline.console.completer.Completer;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public abstract class HdfsAbstract extends AbstractCommand {

@@ -42,124 +43,42 @@ public abstract class HdfsAbstract extends AbstractCommand {
public int CODE_NOT_FOUND = -99;

protected Environment env;

enum Side {
LEFT,RIGHT
}

protected Direction directionContext = null;

protected int directives = 0;
protected boolean directivesBefore = true;
protected boolean directivesOptional = false;

protected PathBuilder pathBuilder;
protected PathDirectives pathDirectives;

public HdfsAbstract(String name) {
super(name);
}

public HdfsAbstract(String name, Environment env, Direction directionContext ) {
super(name);
pathDirectives = new PathDirectives(directionContext);
pathBuilder = new PathBuilder(env, pathDirectives);
this.env = env;
this.directionContext = directionContext;
}

public HdfsAbstract(String name, Environment env, Direction directionContext, int directives ) {
super(name);
this.env = env;
this.directionContext = directionContext;
this.directives = directives;
pathDirectives = new PathDirectives(directionContext, directives);
pathBuilder = new PathBuilder(env, pathDirectives);
}

public HdfsAbstract(String name, Environment env, Direction directionContext, int directives, boolean directivesBefore, boolean directivesOptional ) {
super(name);
this.env = env;
this.directionContext = directionContext;
this.directives = directives;
this.directivesBefore = directivesBefore;
this.directivesOptional = directivesOptional;
pathDirectives = new PathDirectives(directionContext, directives, directivesBefore, directivesOptional);
pathBuilder = new PathBuilder(env, pathDirectives);
}

public HdfsAbstract(String name, Environment env) {
super(name);
this.env = env;
}

protected String buildPath(Side side, String[] args, Direction context) {
String rtn = null;

FileSystem localfs = (FileSystem)env.getValue(Constants.LOCAL_FS);
FileSystem hdfs = (FileSystem) env.getValue(Constants.HDFS);

String in = null;

switch (side) {
case LEFT:
if (args.length > 0)
if (directivesBefore) {
in = args[directives];
} else {
if (directivesOptional) {
if (args.length > directives) {
in = args[args.length-(directives+1)];
} else {
// in is null
}
} else {
in = args[args.length-(directives+1)];
}
}
switch (context) {
case REMOTE_LOCAL:
case REMOTE_REMOTE:
case NONE:
rtn = buildPath2(hdfs.getWorkingDirectory().toString().substring(((String)env.getProperties().getProperty(Constants.HDFS_URL)).length()), in);
break;
case LOCAL_REMOTE:
rtn = buildPath2(localfs.getWorkingDirectory().toString().substring(5), in);
break;
}
break;
case RIGHT:
if (args.length > 1)
if (directivesBefore)
in = args[directives + 1];
else
in = args[args.length-(directives+1)];
switch (context) {
case REMOTE_LOCAL:
rtn = buildPath2(localfs.getWorkingDirectory().toString().substring(5), in);
break;
case LOCAL_REMOTE:
case REMOTE_REMOTE:
rtn = buildPath2(hdfs.getWorkingDirectory().toString().substring(((String)env.getProperties().getProperty(Constants.HDFS_URL)).length()), in);
break;
case NONE:
break;
}
break;
}
if (rtn != null && rtn.contains(" ")) {
rtn = "'" + rtn + "'";
}
return rtn;
}

protected String buildPath2(String current, String input) {
if (input != null) {
if (input.startsWith("/"))
return input;
else
return current + "/" + input;
} else {
return current;
}
}


@Override
public Completer getCompleter() {
return new FileSystemNameCompleter(this.env, false);
}


}
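With the Side enum and the buildPath/buildPath2 helpers removed, the base class now only assembles a PathDirectives value object and a PathBuilder from its constructor arguments. A sketch of what a subclass constructor call amounts to after this change (the command name and values are invented for illustration):

    // Inside a hypothetical subclass constructor: a command that copies HDFS -> local
    // and expects one directive token ahead of the paths.
    super("get", env, Direction.REMOTE_LOCAL, 1);
    // HdfsAbstract now translates this into:
    //   pathDirectives = new PathDirectives(Direction.REMOTE_LOCAL, 1);
    //   pathBuilder    = new PathBuilder(env, pathDirectives);
    // so later code calls pathBuilder.buildPath(Side.LEFT, args) instead of the removed buildPath(...).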
@@ -50,6 +50,9 @@ public CommandReturn execute(Environment env, CommandLine cmd, ConsoleReader rea
hdfs = (FileSystem) env.getValue(Constants.HDFS);

String dir = cmd.getArgs().length == 0 ? "/" : cmd.getArgs()[0];
if (dir.startsWith("\"") & dir.endsWith("\"")) {
dir = dir.substring(1, dir.length()-1);
}
logv(env, "CWD before: " + hdfs.getWorkingDirectory());
logv(env, "Requested CWD: " + dir);

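The added check lets a double-quoted argument carry embedded spaces by stripping one surrounding pair of quotes before the directory is used. A small illustration of the same logic with an invented path:

    String dir = "\"/user/dstreev/dir with spaces\"";   // argument as received, quotes included
    if (dir.startsWith("\"") & dir.endsWith("\"")) {     // mirrors the diff; both operands are boolean
        dir = dir.substring(1, dir.length() - 1);        // -> /user/dstreev/dir with spaces
    }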
@@ -88,7 +88,7 @@ public CommandReturn execute(Environment env, CommandLine cmd, ConsoleReader rea
String leftPath = null;
String rightPath = null;

switch (directionContext) {
switch (pathDirectives.getDirection()) {
case REMOTE_LOCAL:
pathCount += 2; // Source and Destination Path Elements.
break;
@@ -104,9 +104,9 @@ public CommandReturn execute(Environment env, CommandLine cmd, ConsoleReader rea
pathCount += 1;
}

leftPath = buildPath(Side.LEFT, cmdArgs, directionContext);
if (directionContext != Direction.NONE) {
rightPath = buildPath(Side.RIGHT, cmdArgs, directionContext);
leftPath = pathBuilder.buildPath(Side.LEFT, cmdArgs);
if (pathDirectives.getDirection() != Direction.NONE) {
rightPath = pathBuilder.buildPath(Side.RIGHT, cmdArgs);
}

String[] newCmdArgs = new String[pathCount];
@@ -117,16 +117,16 @@ public CommandReturn execute(Environment env, CommandLine cmd, ConsoleReader rea
newCmdArgs[0] = leftPath;
}

argv = new String[cmdOpts.length + newCmdArgs.length + 1 + directives];
argv = new String[cmdOpts.length + newCmdArgs.length + 1 + pathDirectives.getDirectives()];

int pos = 1;

for (Option opt: cmdOpts) {
argv[pos++] = "-" + opt.getOpt();
}

if (directivesBefore) {
for (int i = 0; i < directives; i++) {
if (pathDirectives.isBefore()) {
for (int i = 0; i < pathDirectives.getDirectives(); i++) {
argv[pos++] = cmdArgs[i];
}
}
@@ -135,8 +135,8 @@ public CommandReturn execute(Environment env, CommandLine cmd, ConsoleReader rea
argv[pos++] = arg;
}

if (!directivesBefore) {
for (int i = directives; i > 0; i--) {
if (!pathDirectives.isBefore()) {
for (int i = pathDirectives.getDirectives(); i > 0; i--) {
try {
argv[pos++] = cmdArgs[cmdArgs.length - (i)];
} catch (Exception e) {
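Taken together, the rebuilt argv carries the re-prefixed options first, then the directive tokens when pathDirectives.isBefore() is true, then the resolved path elements, with trailing directive tokens otherwise; pos starts at 1, so index 0 is reserved and is not filled in this hunk. An illustrative trace with invented inputs (one directive, before = true, direction REMOTE_LOCAL); the loop that copies newCmdArgs into argv is elided above and assumed here:

    // cmdArgs after option parsing: directive token, source, destination (invented values).
    String[] cmdArgs = { "3", "data/file.txt", "/tmp/local" };
    // One -f option was parsed; PathBuilder resolved the paths to:
    //   leftPath  = "/user/dstreev/data/file.txt"   (relative source, HDFS working directory)
    //   rightPath = "/tmp/local"                    (already absolute)
    // argv length = 1 option + 2 paths + 1 reserved slot + 1 directive = 5, filled as:
    //   [ <reserved>, "-f", "3", "/user/dstreev/data/file.txt", "/tmp/local" ]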
97 changes: 97 additions & 0 deletions src/main/java/com/streever/hadoop/hdfs/shell/command/PathBuilder.java
@@ -0,0 +1,97 @@
package com.streever.hadoop.hdfs.shell.command;

import com.streever.tools.stemshell.Environment;
import org.apache.hadoop.fs.FileSystem;

public class PathBuilder {

private Environment env;
private PathDirectives directives;

public PathBuilder(Environment env, PathDirectives directives) {
this.env = env;
this.directives = directives;
}

public String buildPath(Side side, String[] args) {
String rtn = null;

FileSystem localfs = (FileSystem)env.getValue(Constants.LOCAL_FS);
FileSystem hdfs = (FileSystem) env.getValue(Constants.HDFS);

String in = null;

switch (side) {
case LEFT:
if (args.length > 0)
if (directives.isBefore()) {
in = args[directives.getDirectives()];
} else {
if (directives.isOptional()) {
if (args.length > directives.getDirectives()) {
in = args[args.length-(directives.getDirectives()+1)];
} else {
// in is null
}
} else {
in = args[args.length-(directives.getDirectives()+1)];
}
}
switch (directives.getDirection()) {
case REMOTE_LOCAL:
case REMOTE_REMOTE:
case NONE:
rtn = resolveFullPath(hdfs.getWorkingDirectory().toString().substring(((String)env.getProperties().getProperty(Constants.HDFS_URL)).length()), in);
break;
case LOCAL_REMOTE:
rtn = resolveFullPath(localfs.getWorkingDirectory().toString().substring(5), in);
break;
}
break;
case RIGHT:
if (args.length > 1)
if (directives.isBefore())
in = args[directives.getDirectives() + 1];
else
in = args[args.length-(directives.getDirectives()+1)];
switch (directives.getDirection()) {
case REMOTE_LOCAL:
rtn = resolveFullPath(localfs.getWorkingDirectory().toString().substring(5), in);
break;
case LOCAL_REMOTE:
case REMOTE_REMOTE:
rtn = resolveFullPath(hdfs.getWorkingDirectory().toString().substring(((String)env.getProperties().getProperty(Constants.HDFS_URL)).length()), in);
break;
case NONE:
break;
}
break;
}
// if (rtn != null && rtn.contains(" ")) {
// rtn = "\"" + rtn + "\"";
// }
return rtn;
}

public static String resolveFullPath(String current, String input) {
String adjusted = null;
boolean enclose = false;
if (input != null) {
if (input.startsWith("\"") & input.endsWith("\"")) {
adjusted = input.substring(1, input.length()-1);
} else {
adjusted = input;
}
if (!adjusted.startsWith("/"))
adjusted = current + "/" + adjusted;
} else {
adjusted = current;
// return current;
}
// if (adjusted.contains(" ")) {
// adjusted = "\\\"" + adjusted + "\\\"";
// }
return adjusted;
}

}
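A sketch of the two entry points with invented paths; resolveFullPath is where a surrounding pair of double quotes is now stripped before resolution:

    // Absolute input passes through; relative input is anchored at `current`; one pair
    // of surrounding double quotes is removed first.
    PathBuilder.resolveFullPath("/user/dstreev", "\"dir with spaces/part-0\"");  // -> /user/dstreev/dir with spaces/part-0
    PathBuilder.resolveFullPath("/user/dstreev", "/tmp/out");                    // -> /tmp/out

    // buildPath with PathDirectives(Direction.REMOTE_LOCAL, 1, true, false) and
    // args = { "3", "src.txt", "/tmp/dst" }:
    //   Side.LEFT  picks args[1] ("src.txt") and resolves it against the HDFS working directory;
    //   Side.RIGHT picks args[2] ("/tmp/dst") and resolves it against the local working directory.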
48 changes: 48 additions & 0 deletions src/main/java/com/streever/hadoop/hdfs/shell/command/PathDirectives.java
@@ -0,0 +1,48 @@
package com.streever.hadoop.hdfs.shell.command;

public class PathDirectives {
private Direction direction = null;

private int directives = 0; //default
private boolean before = true; //default
private boolean optional = false; //default

public Direction getDirection() {
return direction;
}

public int getDirectives() {
return directives;
}

public boolean isBefore() {
return before;
}

public boolean isOptional() {
return optional;
}

public PathDirectives(Direction direction, int directives, boolean before, boolean optional) {
this.direction = direction;
this.directives = directives;
this.before = before;
this.optional = optional;
}

public PathDirectives(Direction direction, int directives, boolean before) {
this.direction = direction;
this.directives = directives;
this.before = before;
}

public PathDirectives(Direction direction, int directives) {
this.direction = direction;
this.directives = directives;
}

public PathDirectives(Direction direction) {
this.direction = direction;
}

}
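The shorter constructors fall back to the field defaults declared above; for illustration:

    PathDirectives pd = new PathDirectives(Direction.REMOTE_LOCAL);
    pd.getDirectives();   // 0     (default)
    pd.isBefore();        // true  (default)
    pd.isOptional();      // false (default)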