Skip to content

Commit

Permalink
ISSUE #13
Browse files Browse the repository at this point in the history
  • Loading branch information
dstreev committed Dec 5, 2019
1 parent de57f5c commit 638ef6a
Show file tree
Hide file tree
Showing 6 changed files with 126 additions and 70 deletions.
21 changes: 12 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,16 +37,19 @@ To use an alternate HADOOP_CONF_DIR:

```
usage: hadoopcli
-d,--debug Debug Commands
-e,--execute <command [args]> Execute Command
-f,--file <file to exec> Run File and Exit
-d,--debug Debug Commands
-e,--execute <command [args]> Execute Command
-f,--file <file to exec> Run File and Exit
-h,--help
-i,--init <init set> Initialize with set
-p,--password <password> Password
-s,--silent Suppress Banner
-stdin,--stdin Run Stdin pipe and Exit
-u,--username <username> Username to log into gateway
-v,--verbose Verbose Commands
-i,--init <init set> Initialize with set
-s,--silent Suppress Banner
-stdin,--stdin Run Stdin pipe and Exit
-t,--template <template> Template to apply on
input (-f | -stdin)
-td,--template-delimiter <template-delimiter> Delimiter to apply to
'input' for template
option (default=',')
-v,--verbose Verbose Commands
```

### File System Command Basics
Expand Down
10 changes: 10 additions & 0 deletions bin/hadoopcli
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,16 @@ while [[ $# -gt 0 ]]; do
shift
PRG_ARGS="${PRG_ARGS} -d"
;;
-t|--template)
shift
PRG_ARGS="${PRG_ARGS} -t \"${1}\""
shift
;;
-td|--template-delimiter)
shift
PRG_ARGS="${PRG_ARGS} -td \"${1}\""
shift
;;
-stdin)
shift
PRG_ARGS="${PRG_ARGS} -stdin"
Expand Down
34 changes: 9 additions & 25 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,13 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.streever.hadoop</groupId>
<artifactId>hadoop.cli</artifactId>
<version>2.0.21-SNAPSHOT</version>
<version>2.0.22-SNAPSHOT</version>
<name>Hadoop CLI</name>
<description>Hadoop Command Line Interface</description>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- See Profiles for the correct version of Hadoop -->
<!--<hadoop.version>2.7.1.2.4.1.0-327</hadoop.version>-->
<!--<hadoop.version>2.6.0.2.2.8.0-3150</hadoop.version>-->
Expand All @@ -38,7 +41,7 @@
<stem-shell.version>1.0.7-SNAPSHOT</stem-shell.version>
</properties>
<repositories>
<repository>
<!--repository>
<releases>
<enabled>true</enabled>
<updatePolicy>always</updatePolicy>
Expand Down Expand Up @@ -70,7 +73,7 @@
<url>http://repo.hortonworks.com/content/groups/public/</url>
<layout>default</layout>
</repository>
<!--repository>
<repository>
<releases>
<enabled>true</enabled>
<updatePolicy>always</updatePolicy>
Expand Down Expand Up @@ -160,7 +163,7 @@
<goal>shade</goal>
</goals>
<configuration>
<createSourcesJar>true</createSourcesJar>
<createSourcesJar>false</createSourcesJar>
<finalName>hadoop-cli-full-bin</finalName>
<shadedArtifactAttached>true</shadedArtifactAttached>
<filters>
Expand All @@ -185,14 +188,6 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
Expand All @@ -214,17 +209,6 @@
</plugins>
</build>
<profiles>
<profile>
<id>2.6</id>
<!--<activation>-->
<!--<activeByDefault>true</activeByDefault>-->
<!--</activation>-->
<properties>
<hadoop.version>2.6.0.2.2.9.9-2</hadoop.version>
<gateway-shell.version>0.5.0.2.2.9.9-2</gateway-shell.version>
<hadoop.major>2.6</hadoop.major>
</properties>
</profile>
<profile>
<id>2.7</id>
<properties>
Expand All @@ -239,8 +223,8 @@
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<hadoop.version>3.1.1.3.1.0.0-78</hadoop.version>
<gateway-shell.version>1.0.0.3.1.0.0-78</gateway-shell.version>
<hadoop.version>3.1.1</hadoop.version>
<gateway-shell.version>1.0.0</gateway-shell.version>
<hadoop.major>3.1</hadoop.major>
</properties>
</profile>
Expand Down
98 changes: 65 additions & 33 deletions src/main/java/com/streever/hadoop/HadoopShell.java
Original file line number Diff line number Diff line change
Expand Up @@ -706,6 +706,7 @@
import org.apache.hadoop.fs.FileSystem;

import java.io.*;
import java.text.MessageFormat;
import java.util.concurrent.*;

public class HadoopShell extends com.streever.tools.stemshell.AbstractShell {
Expand Down Expand Up @@ -744,6 +745,20 @@ private Options getOptions() {
.build();
options.addOption(fileOption);

Option templateOption = Option.builder("t").required(false)
.argName("template").desc("Template to apply on input (-f | -stdin)")
.longOpt("template")
.hasArg(true).numberOfArgs(1)
.build();
options.addOption(templateOption);

Option delimiterOption = Option.builder("td").required(false)
.argName("template-delimiter").desc("Delimiter to apply to 'input' for template option (default=',')")
.longOpt("template-delimiter")
.hasArg(true).numberOfArgs(1)
.build();
options.addOption(delimiterOption);

// add stdin option
Option siOption = Option.builder("stdin").required(false)
.argName("stdin process").desc("Run Stdin pipe and Exit")
Expand Down Expand Up @@ -773,26 +788,26 @@ private Options getOptions() {
.build();
options.addOption(debugOption);

Option usernameOption = Option.builder("u").required(false)
.argName("username").desc("Username to log into gateway")
.longOpt("username")
.hasArg(true).numberOfArgs(1)
.build();
options.addOption(usernameOption);

Option passwordOption = Option.builder("p").required(false)
.argName("password").desc("Password")
.longOpt("password")
.hasArg(true).numberOfArgs(1)
.build();
options.addOption(passwordOption);

Option webhdfsOption = Option.builder("w").required(false)
.argName("webhdfs://<host>:<port>").desc("Connect via webhdfs")
.longOpt("webhdfs")
.hasArg(true).numberOfArgs(1)
.build();
options.addOption(webhdfsOption);
// Option usernameOption = Option.builder("u").required(false)
// .argName("username").desc("Username to log into gateway")
// .longOpt("username")
// .hasArg(true).numberOfArgs(1)
// .build();
// options.addOption(usernameOption);

// Option passwordOption = Option.builder("p").required(false)
// .argName("password").desc("Password")
// .longOpt("password")
// .hasArg(true).numberOfArgs(1)
// .build();
// options.addOption(passwordOption);

// Option webhdfsOption = Option.builder("w").required(false)
// .argName("webhdfs://<host>:<port>").desc("Connect via webhdfs")
// .longOpt("webhdfs")
// .hasArg(true).numberOfArgs(1)
// .build();
// options.addOption(webhdfsOption);

// Need to add mechanism to use a password from file.
// Need to add mechanism to pull username from file.
Expand Down Expand Up @@ -888,16 +903,24 @@ protected boolean postProcessInitializationArguments(String[] arguments, Console
}

if (cmd.hasOption("i")) {
runFile(cmd.getOptionValue("i"), reader);
runFile(cmd.getOptionValue("i"), null, null, reader);
}

if (cmd.hasOption("e")) {
processInput(cmd.getOptionValue("e"), reader);
processInput("exit", reader);
}
String template = null;
String delimiter = null;
if (cmd.hasOption("t")) {
template = cmd.getOptionValue("t");
}
if (cmd.hasOption("td")) {
delimiter = cmd.getOptionValue("td");
}

if (cmd.hasOption("f")) {
runFile(cmd.getOptionValue("f"), reader);
runFile(cmd.getOptionValue("f"), template, delimiter, reader);
processInput("exit", reader);
}

Expand All @@ -915,7 +938,7 @@ protected boolean postProcessInitializationArguments(String[] arguments, Console
}
tempFileWriter.close();

runFile(temp.getAbsolutePath(), reader);
runFile(temp.getAbsolutePath(), template, delimiter, reader);
processInput("exit", reader);

} catch (Exception e) {
Expand All @@ -926,25 +949,33 @@ protected boolean postProcessInitializationArguments(String[] arguments, Console
return rtn;
}

public void runFile(String set, ConsoleReader reader) {
logv(getEnv(), "-- Running source file: " + set);
public void runFile(String inSet, String template, String delimiter, ConsoleReader reader) {
logv(getEnv(), "-- Running source file: " + inSet);

String localFile = null;

if (set.startsWith("/")) {
localFile = set;
// Absolute Path
if (inSet.startsWith("/")) {
localFile = inSet;
} else {
// Relative Path
org.apache.hadoop.fs.FileSystem localfs = (org.apache.hadoop.fs.FileSystem) getEnv().getValue(Constants.LOCAL_FS);
// org.apache.hadoop.fs.FileSystem hdfs = (org.apache.hadoop.fs.FileSystem) getEnv().getValue(Constants.HDFS);

String localwd = localfs.getWorkingDirectory().toString();
// String hdfswd = hdfs.getWorkingDirectory().toString();

// Remove 'file:' from working directory.
localFile = localwd.split(":")[1] + System.getProperty("file.separator") + set;
localFile = localwd.split(":")[1] + System.getProperty("file.separator") + inSet;
}
File setFile = new File(localFile);

MessageFormat messageFormat = null;
if (template != null) {
messageFormat = new MessageFormat(template);
}
String lclDelimiter = null;
if (delimiter == null) {
lclDelimiter = ",";
}
if (!setFile.exists()) {
loge(getEnv(), "File not found: " + setFile.getAbsoluteFile());
} else {
Expand All @@ -955,10 +986,11 @@ public void runFile(String set, ConsoleReader reader) {
logv(getEnv(), line);
String line2 = line.trim();
if (line2.length() > 0 && !line2.startsWith("#")) {
if (messageFormat != null) {
String[] items = line2.split(lclDelimiter);
line2 = messageFormat.format(items);
}
CommandReturn cr = processInput(line2, reader);
// Handled at process level to extract details from processor
// if (res != 0)
// loge(getEnv(), "Non-Zero Return Code: " + Integer.toString(res));
}
}
} catch (Exception e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,7 @@ public Options getOptions() {
opts.addOption("ignorecrc", false, "ignorecrc");
opts.addOption("crc", false, "crc");

// opts.addOption("c", true, "Comment");
// opts.addOption("ignore-fail-on-non-empty", false, "ignore-fail-on-non-empty");
return opts;
}
Expand Down
32 changes: 29 additions & 3 deletions src/main/java/com/streever/hadoop/hdfs/util/HdfsSource.java
Original file line number Diff line number Diff line change
Expand Up @@ -84,17 +84,25 @@ public CommandReturn implementation(Environment env, CommandLine cmd, ConsoleRea
Option[] cmdOpts = cmd.getOptions();
String[] cmdArgs = cmd.getArgs();

String template = null;
String delimiter = null;
if (cmd.hasOption("t")) {
template = cmd.getOptionValue("t");
}
if (cmd.hasOption("d")) {
delimiter = cmd.getOptionValue("d");
}
if (cmd.hasOption("lf")) {
runSource(cmd.getOptionValue("lf"), reader);
runSource(cmd.getOptionValue("lf"), template, delimiter, reader);
}

logv(env,"'Source' complete.");

return CommandReturn.GOOD;
}

private void runSource(String sourceFile, ConsoleReader reader) {
this.shell.runFile(sourceFile,reader);
// Delegates to the shell's runFile (see HadoopShell) so that a 'source'-d
// local file goes through the same processing path as files supplied on the
// command line; template/delimiter are passed straight through and may be
// null when the -t / -d options were not given.
private void runSource(String sourceFile, String template, String delimiter, ConsoleReader reader) {
    this.shell.runFile(sourceFile,template, delimiter, reader);
}


Expand All @@ -111,6 +119,24 @@ public Options getOptions() {
.build();
opts.addOption(lfileOption);

Option templateOption = Option.builder("t").required(false)
.argName("template")
.desc("Message Template")
.hasArg(true)
.numberOfArgs(1)
.longOpt("template")
.build();
opts.addOption(templateOption);

Option delimiterOption = Option.builder("d").required(false)
.argName("delimiter")
.desc("delimiter")
.hasArg(true)
.numberOfArgs(1)
.longOpt("delimiter")
.build();
opts.addOption(delimiterOption);

// TODO: Add Distributed File Source
// Option dfileOption = Option.builder("df").required(false)
// .argName("source distributed file")
Expand Down

0 comments on commit 638ef6a

Please sign in to comment.