import java.io.*;
import java.util.*;
/** Drains an InputStream on its own thread so the child process never blocks on a full pipe. */
class ThreadStreamReader extends Thread
{
    InputStream is;
    String name = "";
    List<String> outs;

    ThreadStreamReader(InputStream is, List<String> outs, String name)
    {
        this.is = is;
        this.name = name;
        this.outs = outs;
    } // ThreadStreamReader()
    @Override
    public void run()
    {
        try
        {
            InputStreamReader isr = new InputStreamReader(is);
            BufferedReader br = new BufferedReader(isr);
            String line = null;
            // Read until the stream closes, collecting one line at a time.
            while ((line = br.readLine()) != null)
            {
                outs.add(line);
            }
        } catch (IOException ioe)
        {
            ioe.printStackTrace();
        }
    } // run()
} // class ThreadStreamReader
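
// A minimal standalone sketch of ThreadStreamReader, not part of the original
// listing: it can drain the stream of any external process. The "cmd /q /c dir"
// command below is a placeholder assumption. If the list may be read while the
// reader thread is still running, wrap it in a synchronized list, e.g.
// Collections.synchronizedList(new ArrayList<String>()).
class ThreadStreamReaderDemo {
    public static void main(String[] args) throws Exception {
        List<String> lines = new ArrayList<String>();
        Process p = Runtime.getRuntime().exec("cmd /q /c dir");
        ThreadStreamReader reader = new ThreadStreamReader(p.getInputStream(), lines, "STDOUT");
        reader.start();
        p.waitFor();
        reader.join(); // make sure the last lines have been collected before reading
        for (String line : lines) {
            System.out.println(line);
        }
    }
}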
public class HadoopFS {
    public String command = "";
    public List<String> stdout = new ArrayList<String>();
    public List<String> stderr = new ArrayList<String>();
    public int exitvalue = 0;

    public HadoopFS(String c) {
        reinit(c);
    } // HadoopFS()

    public void reinit(String c) {
        this.command = "cmd /q /c myhadoopfs 192.168.65.128 hadoop-user hadoop " + c;
        this.stdout.clear();
        this.stderr.clear();
        this.exitvalue = 0;
    } // reinit()
    public void callCommand() {
        try {
            Process p = Runtime.getRuntime().exec(this.command);
            // Note the process blocks when stdout or stderr gets too full.
            // We have to read the streams in different threads to prevent this.
            // http://www.javaworld.com/javaworld/jw-12-2000/jw-1229-traps.html?page=1
            ThreadStreamReader inputtsr = new ThreadStreamReader(p.getInputStream(), this.stdout, "STDOUT");
            ThreadStreamReader errortsr = new ThreadStreamReader(p.getErrorStream(), this.stderr, "STDERR");
            inputtsr.start();
            errortsr.start();
            this.exitvalue = p.waitFor();
            // Join the reader threads as well; waitFor() alone only guarantees the
            // process exited, not that the last lines have been added to the lists.
            inputtsr.join();
            errortsr.join();
        } catch (Exception err) {
            err.printStackTrace();
        }
    } // callCommand()
} // class HadoopFS
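
// A minimal usage sketch, not part of the original listing: the "fs -ls /"
// argument is a placeholder, assuming the myhadoopfs wrapper accepts a standard
// "hadoop fs" style command for the host and user baked into reinit() above.
class HadoopFSDemo {
    public static void main(String[] args) {
        HadoopFS hdfs = new HadoopFS("fs -ls /");
        hdfs.callCommand();
        // After callCommand() returns, both streams have been fully drained.
        for (String line : hdfs.stdout) {
            System.out.println("OUT: " + line);
        }
        for (String line : hdfs.stderr) {
            System.out.println("ERR: " + line);
        }
        System.out.println("exit value: " + hdfs.exitvalue);
    }
}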