Like Google's GFS, the Hadoop Distributed File System (HDFS) is a distributed file system designed to run on commodity hardware. It has much in common with existing distributed file systems, but the differences are also significant: HDFS is highly fault-tolerant and is meant to be deployed on inexpensive machines; it provides high-throughput data access and is well suited to applications with very large data sets; and it relaxes a few POSIX constraints to enable streaming access to file system data. HDFS was originally built as infrastructure for the Apache Nutch search engine project and is now part of the Apache Hadoop Core project.

1. App1.java

package hdfs;

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class App1 {
    /**
     * Throws "unknown host: chaoren" when the local machine
     * cannot resolve the hostname chaoren.
     */
    static final String PATH = "hdfs://chaoren:9000/hello";

    public static void main(String[] args) throws Exception {
        // Register the HDFS stream handler so java.net.URL understands hdfs:// URLs.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        final URL url = new URL(PATH);
        final InputStream in = url.openStream();
        /**
         * @param in       the input stream
         * @param out      the output stream
         * @param buffSize the buffer size
         * @param close    whether to close the streams when the copy finishes
         */
        IOUtils.copyBytes(in, System.out, 1024, true);
    }
}
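Two things must be in place before App1 will run: the hostname chaoren has to resolve to the NameNode (for example via an entry in the local hosts file, which is what the "unknown host" comment above is about), and /hello must already exist in HDFS, e.g. uploaded beforehand with hadoop fs -put. Also note that URL.setURLStreamHandlerFactory may be called at most once per JVM, so this URL-based read is best for quick one-off access; the FileSystem API used in App2 below is the more general approach.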

2. App2.java

package hdfs;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class App2 {
    static final String PATH = "hdfs://chaoren:9000/";
    static final String DIR = "/d1";
    static final String FILE = "/d1/hello";

    public static void main(String[] args) throws Exception {
        FileSystem fileSystem = getFileSystem();
        // Create a directory: hadoop fs -mkdir /d1
        mkdir(fileSystem);
        // Upload a file: hadoop fs -put src dest
        putData(fileSystem);
        // Download a file: hadoop fs -get src dest
        //getData(fileSystem);
        // List the root directory
        list(fileSystem);
        // Delete the directory
        //remove(fileSystem);
    }

    private static void list(FileSystem fileSystem) throws IOException {
        final FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : listStatus) {
            String isDir = fileStatus.isDir() ? "directory" : "file";
            final String permission = fileStatus.getPermission().toString();
            final short replication = fileStatus.getReplication();
            final long len = fileStatus.getLen();
            final String path = fileStatus.getPath().toString();
            System.out.println(isDir + "\t" + permission + "\t" + replication + "\t" + len + "\t" + path);
        }
    }

    private static void getData(FileSystem fileSystem) throws IOException {
        final FSDataInputStream in = fileSystem.open(new Path(FILE));
        IOUtils.copyBytes(in, System.out, 1024, true);
    }

    private static void putData(FileSystem fileSystem) throws IOException, FileNotFoundException {
        final FSDataOutputStream out = fileSystem.create(new Path(FILE));
        final FileInputStream in = new FileInputStream("H:/kuaipan/hadoop/classes/yy131009/day2/readme.txt");
        IOUtils.copyBytes(in, out, 1024, true);
    }

    private static void remove(FileSystem fileSystem) throws IOException {
        fileSystem.delete(new Path(DIR), true);
    }

    private static void mkdir(FileSystem fileSystem) throws IOException {
        fileSystem.mkdirs(new Path(DIR));
    }

    private static FileSystem getFileSystem() throws IOException, URISyntaxException {
        return FileSystem.get(new URI(PATH), new Configuration());
    }
}
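putData and getData copy the streams by hand; FileSystem also ships convenience wrappers that do the same work in one call. A minimal sketch of that alternative, assuming the same chaoren:9000 NameNode and paths as App2 (the class name App2Convenience and the local target path are invented for illustration):

package hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class App2Convenience {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://chaoren:9000/"), new Configuration());
        // Upload: equivalent to putData(), but Hadoop manages the streams.
        fs.copyFromLocalFile(new Path("H:/kuaipan/hadoop/classes/yy131009/day2/readme.txt"),
                             new Path("/d1/hello"));
        // Download: equivalent to getData(), writing to a local file instead of System.out.
        fs.copyToLocalFile(new Path("/d1/hello"), new Path("H:/hello.copy"));
        fs.close();
    }
}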

3. FileUtil.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import java.io.*;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

/**
 * A collection of file-processing util methods
 */
public class FileUtil {
  private static final Log LOG = LogFactory.getLog(FileUtil.class);

  /**
   * convert an array of FileStatus to an array of Path
   *
   * @param stats an array of FileStatus objects
   * @return an array of paths corresponding to the input
   */
  public static Path[] stat2Paths(FileStatus[] stats) {
    if (stats == null)
      return null;
    Path[] ret = new Path[stats.length];
    for (int i = 0; i < stats.length; ++i) {
      ret[i] = stats[i].getPath();
    }
    return ret;
  }

  /**
   * convert an array of FileStatus to an array of Path.
   * If stats is null, return the default path.
   * @param stats an array of FileStatus objects
   * @param path default path to return if stats is null
   * @return an array of paths corresponding to the input
   */
  public static Path[] stat2Paths(FileStatus[] stats, Path path) {
    if (stats == null)
      return new Path[]{path};
    else
      return stat2Paths(stats);
  }

  /**
   * Delete a directory and all its contents.  If
   * we return false, the directory may be partially-deleted.
   */
  public static boolean fullyDelete(File dir) throws IOException {
    if (!fullyDeleteContents(dir)) {
      return false;
    }
    return dir.delete();
  }

  /**
   * Delete the contents of a directory, not the directory itself.  If
   * we return false, the directory may be partially-deleted.
   */
  public static boolean fullyDeleteContents(File dir) throws IOException {
    boolean deletionSucceeded = true;
    File contents[] = dir.listFiles();
    if (contents != null) {
      for (int i = 0; i < contents.length; i++) {
        if (contents[i].isFile()) {
          if (!contents[i].delete()) {
            deletionSucceeded = false;
            continue; // continue deletion of other files/dirs under dir
          }
        } else {
          // try deleting the directory; this might be a symlink
          boolean b = false;
          b = contents[i].delete();
          if (b) {
            // this was indeed a symlink or an empty directory
            continue;
          }
          // if not an empty directory or symlink let fullyDelete handle it.
          if (!fullyDelete(contents[i])) {
            deletionSucceeded = false;
            continue; // continue deletion of other files/dirs under dir
          }
        }
      }
    }
    return deletionSucceeded;
  }

  /**
   * Recursively delete a directory.
   *
   * @param fs {@link FileSystem} on which the path is present
   * @param dir directory to recursively delete
   * @throws IOException
   * @deprecated Use {@link FileSystem#delete(Path, boolean)}
   */
  @Deprecated
  public static void fullyDelete(FileSystem fs, Path dir) throws IOException {
    fs.delete(dir, true);
  }

  //
  // If the destination is a subdirectory of the source, then
  // generate exception
  //
  private static void checkDependencies(FileSystem srcFS, Path src,
                                        FileSystem dstFS, Path dst)
                                        throws IOException {
    if (srcFS == dstFS) {
      String srcq = src.makeQualified(srcFS).toString() + Path.SEPARATOR;
      String dstq = dst.makeQualified(dstFS).toString() + Path.SEPARATOR;
      if (dstq.startsWith(srcq)) {
        if (srcq.length() == dstq.length()) {
          throw new IOException("Cannot copy " + src + " to itself.");
        } else {
          throw new IOException("Cannot copy " + src + " to its subdirectory " + dst);
        }
      }
    }
  }

  /** Copy files between FileSystems. */
  public static boolean copy(FileSystem srcFS, Path src,
                             FileSystem dstFS, Path dst,
                             boolean deleteSource,
                             Configuration conf) throws IOException {
    return copy(srcFS, src, dstFS, dst, deleteSource, true, conf);
  }

  public static boolean copy(FileSystem srcFS, Path[] srcs,
                             FileSystem dstFS, Path dst,
                             boolean deleteSource,
                             boolean overwrite, Configuration conf)
                             throws IOException {
    boolean gotException = false;
    boolean returnVal = true;
    StringBuffer exceptions = new StringBuffer();

    if (srcs.length == 1)
      return copy(srcFS, srcs[0], dstFS, dst, deleteSource, overwrite, conf);

    // Check if dest is directory
    if (!dstFS.exists(dst)) {
      throw new IOException("`" + dst + "': specified destination directory " +
                            "does not exist");
    } else {
      FileStatus sdst = dstFS.getFileStatus(dst);
      if (!sdst.isDir())
        throw new IOException("copying multiple files, but last argument `" +
                              dst + "' is not a directory");
    }

    for (Path src : srcs) {
      try {
        if (!copy(srcFS, src, dstFS, dst, deleteSource, overwrite, conf))
          returnVal = false;
      } catch (IOException e) {
        gotException = true;
        exceptions.append(e.getMessage());
        exceptions.append("\n");
      }
    }
    if (gotException) {
      throw new IOException(exceptions.toString());
    }
    return returnVal;
  }

  /** Copy files between FileSystems. */
  public static boolean copy(FileSystem srcFS, Path src,
                             FileSystem dstFS, Path dst,
                             boolean deleteSource,
                             boolean overwrite,
                             Configuration conf) throws IOException {
    dst = checkDest(src.getName(), dstFS, dst, overwrite);

    if (srcFS.getFileStatus(src).isDir()) {
      checkDependencies(srcFS, src, dstFS, dst);
      if (!dstFS.mkdirs(dst)) {
        return false;
      }
      FileStatus contents[] = srcFS.listStatus(src);
      for (int i = 0; i < contents.length; i++) {
        copy(srcFS, contents[i].getPath(), dstFS,
             new Path(dst, contents[i].getPath().getName()),
             deleteSource, overwrite, conf);
      }
    } else if (srcFS.isFile(src)) {
      InputStream in = null;
      OutputStream out = null;
      try {
        in = srcFS.open(src);
        out = dstFS.create(dst, overwrite);
        IOUtils.copyBytes(in, out, conf, true);
      } catch (IOException e) {
        IOUtils.closeStream(out);
        IOUtils.closeStream(in);
        throw e;
      }
    } else {
      throw new IOException(src.toString() + ": No such file or directory");
    }
    if (deleteSource) {
      return srcFS.delete(src, true);
    } else {
      return true;
    }
  }

  /** Copy all files in a directory to one output file (merge). */
  public static boolean copyMerge(FileSystem srcFS, Path srcDir,
                                  FileSystem dstFS, Path dstFile,
                                  boolean deleteSource,
                                  Configuration conf, String addString)
                                  throws IOException {
    dstFile = checkDest(srcDir.getName(), dstFS, dstFile, false);

    if (!srcFS.getFileStatus(srcDir).isDir())
      return false;

    OutputStream out = dstFS.create(dstFile);

    try {
      FileStatus contents[] = srcFS.listStatus(srcDir);
      for (int i = 0; i < contents.length; i++) {
        if (!contents[i].isDir()) {
          InputStream in = srcFS.open(contents[i].getPath());
          try {
            IOUtils.copyBytes(in, out, conf, false);
            if (addString != null)
              out.write(addString.getBytes("UTF-8"));
          } finally {
            in.close();
          }
        }
      }
    } finally {
      out.close();
    }

    if (deleteSource) {
      return srcFS.delete(srcDir, true);
    } else {
      return true;
    }
  }

  /** Copy local files to a FileSystem. */
  public static boolean copy(File src,
                             FileSystem dstFS, Path dst,
                             boolean deleteSource,
                             Configuration conf) throws IOException {
    dst = checkDest(src.getName(), dstFS, dst, false);

    if (src.isDirectory()) {
      if (!dstFS.mkdirs(dst)) {
        return false;
      }
      File contents[] = listFiles(src);
      for (int i = 0; i < contents.length; i++) {
        copy(contents[i], dstFS, new Path(dst, contents[i].getName()),
             deleteSource, conf);
      }
    } else if (src.isFile()) {
      InputStream in = null;
      OutputStream out = null;
      try {
        in = new FileInputStream(src);
        out = dstFS.create(dst);
        IOUtils.copyBytes(in, out, conf);
      } catch (IOException e) {
        IOUtils.closeStream(out);
        IOUtils.closeStream(in);
        throw e;
      }
    } else {
      throw new IOException(src.toString() + ": No such file or directory");
    }
    if (deleteSource) {
      return FileUtil.fullyDelete(src);
    } else {
      return true;
    }
  }

  /** Copy FileSystem files to local files. */
  public static boolean copy(FileSystem srcFS, Path src,
                             File dst, boolean deleteSource,
                             Configuration conf) throws IOException {
    if (srcFS.getFileStatus(src).isDir()) {
      if (!dst.mkdirs()) {
        return false;
      }
      FileStatus contents[] = srcFS.listStatus(src);
      for (int i = 0; i < contents.length; i++) {
        copy(srcFS, contents[i].getPath(),
             new File(dst, contents[i].getPath().getName()),
             deleteSource, conf);
      }
    } else if (srcFS.isFile(src)) {
      InputStream in = srcFS.open(src);
      IOUtils.copyBytes(in, new FileOutputStream(dst), conf);
    } else {
      throw new IOException(src.toString() + ": No such file or directory");
    }
    if (deleteSource) {
      return srcFS.delete(src, true);
    } else {
      return true;
    }
  }

  private static Path checkDest(String srcName, FileSystem dstFS, Path dst,
                                boolean overwrite) throws IOException {
    if (dstFS.exists(dst)) {
      FileStatus sdst = dstFS.getFileStatus(dst);
      if (sdst.isDir()) {
        if (null == srcName) {
          throw new IOException("Target " + dst + " is a directory");
        }
        return checkDest(null, dstFS, new Path(dst, srcName), overwrite);
      } else if (!overwrite) {
        throw new IOException("Target " + dst + " already exists");
      }
    }
    return dst;
  }

  /**
   * This class is only used on windows to invoke the cygpath command.
   */
  private static class CygPathCommand extends Shell {
    String[] command;
    String result;

    CygPathCommand(String path) throws IOException {
      command = new String[]{"cygpath", "-u", path};
      run();
    }

    String getResult() throws IOException {
      return result;
    }

    protected String[] getExecString() {
      return command;
    }

    protected void parseExecResult(BufferedReader lines) throws IOException {
      String line = lines.readLine();
      if (line == null) {
        throw new IOException("Can't convert '" + command[2] + "' to a cygwin path");
      }
      result = line;
    }
  }

  /**
   * Convert an OS-native filename to a path that works for the shell.
   * @param filename The filename to convert
   * @return The unix pathname
   * @throws IOException on windows, there can be problems with the subprocess
   */
  public static String makeShellPath(String filename) throws IOException {
    if (Path.WINDOWS) {
      return new CygPathCommand(filename).getResult();
    } else {
      return filename;
    }
  }

  /**
   * Convert an OS-native filename to a path that works for the shell.
   * @param file The filename to convert
   * @return The unix pathname
   * @throws IOException on windows, there can be problems with the subprocess
   */
  public static String makeShellPath(File file) throws IOException {
    return makeShellPath(file, false);
  }

  /**
   * Convert an OS-native filename to a path that works for the shell.
   * @param file The filename to convert
   * @param makeCanonicalPath Whether to make canonical path for the file passed
   * @return The unix pathname
   * @throws IOException on windows, there can be problems with the subprocess
   */
  public static String makeShellPath(File file, boolean makeCanonicalPath)
      throws IOException {
    if (makeCanonicalPath) {
      return makeShellPath(file.getCanonicalPath());
    } else {
      return makeShellPath(file.toString());
    }
  }

  /**
   * Takes an input dir and returns the du on that local directory. Very basic
   * implementation.
   *
   * @param dir The input dir whose local disk space is measured
   * @return The total disk space of the input local directory
   */
  public static long getDU(File dir) {
    long size = 0;
    if (!dir.exists())
      return 0;
    if (!dir.isDirectory()) {
      return dir.length();
    } else {
      File[] allFiles = dir.listFiles();
      if (allFiles != null) {
        for (int i = 0; i < allFiles.length; i++) {
          boolean isSymLink;
          try {
            isSymLink = org.apache.commons.io.FileUtils.isSymlink(allFiles[i]);
          } catch (IOException ioe) {
            isSymLink = true;
          }
          if (!isSymLink) {
            size += getDU(allFiles[i]);
          }
        }
      }
      return size;
    }
  }

  /**
   * Given a File input it will unzip the file into the unzip directory
   * passed as the second parameter
   * @param inFile The zip file as input
   * @param unzipDir The unzip directory where to unzip the zip file.
   * @throws IOException
   */
  public static void unZip(File inFile, File unzipDir) throws IOException {
    Enumeration<? extends ZipEntry> entries;
    ZipFile zipFile = new ZipFile(inFile);

    try {
      entries = zipFile.entries();
      while (entries.hasMoreElements()) {
        ZipEntry entry = entries.nextElement();
        if (!entry.isDirectory()) {
          InputStream in = zipFile.getInputStream(entry);
          try {
            File file = new File(unzipDir, entry.getName());
            if (!file.getParentFile().mkdirs()) {
              if (!file.getParentFile().isDirectory()) {
                throw new IOException("Mkdirs failed to create " +
                                      file.getParentFile().toString());
              }
            }
            OutputStream out = new FileOutputStream(file);
            try {
              byte[] buffer = new byte[8192];
              int i;
              while ((i = in.read(buffer)) != -1) {
                out.write(buffer, 0, i);
              }
            } finally {
              out.close();
            }
          } finally {
            in.close();
          }
        }
      }
    } finally {
      zipFile.close();
    }
  }

  /**
   * Given a Tar File as input it will untar the file into the untar directory
   * passed as the second parameter
   *
   * This utility will untar ".tar" files and ".tar.gz"/".tgz" files.
   *
   * @param inFile The tar file as input.
   * @param untarDir The untar directory where to untar the tar file.
   * @throws IOException
   */
  public static void unTar(File inFile, File untarDir) throws IOException {
    if (!untarDir.mkdirs()) {
      if (!untarDir.isDirectory()) {
        throw new IOException("Mkdirs failed to create " + untarDir);
      }
    }

    StringBuffer untarCommand = new StringBuffer();
    boolean gzipped = inFile.toString().endsWith("gz");
    if (gzipped) {
      untarCommand.append(" gzip -dc '");
      untarCommand.append(FileUtil.makeShellPath(inFile));
      untarCommand.append("' | (");
    }
    untarCommand.append("cd '");
    untarCommand.append(FileUtil.makeShellPath(untarDir));
    untarCommand.append("' ; ");
    untarCommand.append("tar -xf ");

    if (gzipped) {
      untarCommand.append(" -)");
    } else {
      untarCommand.append(FileUtil.makeShellPath(inFile));
    }
    String[] shellCmd = { "bash", "-c", untarCommand.toString() };
    ShellCommandExecutor shexec = new ShellCommandExecutor(shellCmd);
    shexec.execute();
    int exitcode = shexec.getExitCode();
    if (exitcode != 0) {
      throw new IOException("Error untarring file " + inFile +
                            ". Tar process exited with exit code " + exitcode);
    }
  }

  /**
   * Create a soft link between a src and destination
   * only on a local disk. HDFS does not support this
   * @param target the target for symlink
   * @param linkname the symlink
   * @return value returned by the command
   */
  public static int symLink(String target, String linkname) throws IOException {
    String cmd = "ln -s " + target + " " + linkname;
    Process p = Runtime.getRuntime().exec(cmd, null);
    int returnVal = -1;
    try {
      returnVal = p.waitFor();
    } catch (InterruptedException e) {
      // do nothing as of yet
    }
    if (returnVal != 0) {
      LOG.warn("Command '" + cmd + "' failed " + returnVal +
               " with: " + copyStderr(p));
    }
    return returnVal;
  }

  private static String copyStderr(Process p) throws IOException {
    InputStream err = p.getErrorStream();
    StringBuilder result = new StringBuilder();
    byte[] buff = new byte[4096];
    int len = err.read(buff);
    while (len > 0) {
      result.append(new String(buff, 0, len));
      len = err.read(buff);
    }
    return result.toString();
  }

  /**
   * Change the permissions on a filename.
   * @param filename the name of the file to change
   * @param perm the permission string
   * @return the exit code from the command
   * @throws IOException
   * @throws InterruptedException
   */
  public static int chmod(String filename, String perm)
      throws IOException, InterruptedException {
    return chmod(filename, perm, false);
  }

  /**
   * Change the permissions on a file / directory, recursively, if needed.
   * @param filename name of the file whose permissions are to change
   * @param perm permission string
   * @param recursive true, if permissions should be changed recursively
   * @return the exit code from the command.
   * @throws IOException
   */
  public static int chmod(String filename, String perm, boolean recursive)
      throws IOException {
    StringBuffer cmdBuf = new StringBuffer();
    cmdBuf.append("chmod ");
    if (recursive) {
      cmdBuf.append("-R ");
    }
    cmdBuf.append(perm).append(" ");
    cmdBuf.append(filename);
    String[] shellCmd = {"bash", "-c", cmdBuf.toString()};
    ShellCommandExecutor shExec = new ShellCommandExecutor(shellCmd);
    try {
      shExec.execute();
    } catch (IOException e) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Error while changing permission : " + filename +
                  " Exception: " + StringUtils.stringifyException(e));
      }
    }
    return shExec.getExitCode();
  }

  /**
   * Set permissions to the required value. Uses the java primitives instead
   * of forking if group == other.
   * @param f the file to change
   * @param permission the new permissions
   * @throws IOException
   */
  public static void setPermission(File f, FsPermission permission)
      throws IOException {
    FsAction user = permission.getUserAction();
    FsAction group = permission.getGroupAction();
    FsAction other = permission.getOtherAction();

    // use the native/fork if the group/other permissions are different
    // or if the native is available
    if (group != other || NativeIO.isAvailable()) {
      execSetPermission(f, permission);
      return;
    }

    boolean rv = true;

    // read perms
    rv = f.setReadable(group.implies(FsAction.READ), false);
    checkReturnValue(rv, f, permission);
    if (group.implies(FsAction.READ) != user.implies(FsAction.READ)) {
      f.setReadable(user.implies(FsAction.READ), true);
      checkReturnValue(rv, f, permission);
    }

    // write perms
    rv = f.setWritable(group.implies(FsAction.WRITE), false);
    checkReturnValue(rv, f, permission);
    if (group.implies(FsAction.WRITE) != user.implies(FsAction.WRITE)) {
      f.setWritable(user.implies(FsAction.WRITE), true);
      checkReturnValue(rv, f, permission);
    }

    // exec perms
    rv = f.setExecutable(group.implies(FsAction.EXECUTE), false);
    checkReturnValue(rv, f, permission);
    if (group.implies(FsAction.EXECUTE) != user.implies(FsAction.EXECUTE)) {
      f.setExecutable(user.implies(FsAction.EXECUTE), true);
      checkReturnValue(rv, f, permission);
    }
  }

  private static void checkReturnValue(boolean rv, File p, FsPermission permission)
      throws IOException {
    // deliberately empty in this version of the source
  }

  private static void execSetPermission(File f, FsPermission permission)
      throws IOException {
    if (NativeIO.isAvailable()) {
      NativeIO.chmod(f.getCanonicalPath(), permission.toShort());
    } else {
      execCommand(f, Shell.SET_PERMISSION_COMMAND,
                  String.format("%04o", permission.toShort()));
    }
  }

  static String execCommand(File f, String... cmd) throws IOException {
    String[] args = new String[cmd.length + 1];
    System.arraycopy(cmd, 0, args, 0, cmd.length);
    args[cmd.length] = f.getCanonicalPath();
    String output = Shell.execCommand(args);
    return output;
  }

  /**
   * Create a tmp file for a base file.
   * @param basefile the base file of the tmp
   * @param prefix file name prefix of tmp
   * @param isDeleteOnExit if true, the tmp will be deleted when the VM exits
   * @return a newly created tmp file
   * @exception IOException If a tmp file cannot be created
   * @see java.io.File#createTempFile(String, String, File)
   * @see java.io.File#deleteOnExit()
   */
  public static final File createLocalTempFile(final File basefile,
                                               final String prefix,
                                               final boolean isDeleteOnExit)
      throws IOException {
    File tmp = File.createTempFile(prefix + basefile.getName(),
                                   "", basefile.getParentFile());
    if (isDeleteOnExit) {
      tmp.deleteOnExit();
    }
    return tmp;
  }

  /**
   * Move the src file to the name specified by target.
   * @param src the source file
   * @param target the target file
   * @exception IOException If this operation fails
   */
  public static void replaceFile(File src, File target) throws IOException {
    /* renameTo() has two limitations on Windows platform.
     * src.renameTo(target) fails if
     * 1) If target already exists OR
     * 2) If target is already open for reading/writing.
     */
    if (!src.renameTo(target)) {
      int retries = 5;
      while (target.exists() && !target.delete() && retries-- >= 0) {
        try {
          Thread.sleep(1000);
        } catch (InterruptedException e) {
          throw new IOException("replaceFile interrupted.");
        }
      }
      if (!src.renameTo(target)) {
        throw new IOException("Unable to rename " + src + " to " + target);
      }
    }
  }

  /**
   * A wrapper for {@link File#listFiles()}. This java.io API returns null
   * when a dir is not a directory or for any I/O error. Instead of having
   * null checks everywhere File#listFiles() is used, we add this utility API
   * to get around the problem, for the majority of cases where we prefer
   * an IOException to be thrown.
   * @param dir directory for which listing should be performed
   * @return list of files or empty list
   * @exception IOException for invalid directory or for a bad disk.
   */
  public static File[] listFiles(File dir) throws IOException {
    File[] files = dir.listFiles();
    if (files == null) {
      throw new IOException("Invalid directory or I/O error occurred for dir: " +
                            dir.toString());
    }
    return files;
  }

  /**
   * A wrapper for {@link File#list()}. This java.io API returns null
   * when a dir is not a directory or for any I/O error. Instead of having
   * null checks everywhere File#list() is used, we add this utility API
   * to get around the problem, for the majority of cases where we prefer
   * an IOException to be thrown.
   * @param dir directory for which listing should be performed
   * @return list of file names or empty string list
   * @exception IOException for invalid directory or for a bad disk.
   */
  public static String[] list(File dir) throws IOException {
    String[] fileNames = dir.list();
    if (fileNames == null) {
      throw new IOException("Invalid directory or I/O error occurred for dir: " +
                            dir.toString());
    }
    return fileNames;
  }
}
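FileUtil above is Hadoop's own utility class (a Hadoop 1.x source listing), not code an application needs to write; applications normally just call its static helpers. A small sketch of typical usage, reusing the chaoren:9000 setup from App2 (the class name FileUtilDemo and the local target path are invented for illustration):

package hdfs;

import java.io.File;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class FileUtilDemo {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://chaoren:9000/"), new Configuration());
        // stat2Paths: turn a listStatus() result into plain Paths.
        Path[] paths = FileUtil.stat2Paths(fs.listStatus(new Path("/")));
        for (Path p : paths) {
            System.out.println(p);
        }
        // Copy an HDFS directory to the local disk without deleting the source.
        FileUtil.copy(fs, new Path("/d1"), new File("/tmp/d1-copy"), false, fs.getConf());
        fs.close();
    }
}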

4. MyServer.java

package rpc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Server;

public class MyServer {
    static final String ADDRESS = "localhost";
    static final int PORT = 12345;

    public static void main(String[] args) throws Exception {
        /**
         * Construct an RPC server.
         * @param instance    the instance whose methods will be invoked
         * @param bindAddress the address to bind to for listening for connections
         * @param port        the port to listen on for connections
         * @param conf        the configuration to use
         */
        final Server server = RPC.getServer(new MyBiz(), ADDRESS, PORT, new Configuration());
        server.start();
    }
}
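Note that this RPC.getServer(instance, bindAddress, port, conf) signature is the Hadoop 1.x API; in Hadoop 2.x and later that entry point was replaced by the RPC.Builder class, so this example only compiles against a 1.x era hadoop-core jar. The server reflectively dispatches calls to the instance it is given: any method declared in the protocol interface becomes remotely callable.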

5. MyClient.java

package rpc;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

public class MyClient {
    public static void main(String[] args) throws Exception {
        /**
         * Construct a client-side proxy object that implements the named
         * protocol. The proxy talks to the server at the specified address.
         */
        MyBizable proxy = (MyBizable) RPC.waitForProxy(
                MyBizable.class,
                MyBizable.VERSION,
                new InetSocketAddress(MyServer.ADDRESS, MyServer.PORT),
                new Configuration());
        final String result = proxy.hello("world");
        System.out.println("Result on the client: " + result);
        // Close the network connection
        RPC.stopProxy(proxy);
    }
}

6. MyBizable.java

package rpc;

import org.apache.hadoop.ipc.VersionedProtocol;

public interface MyBizable extends VersionedProtocol {
    long VERSION = 2345245L;

    public abstract String hello(String name);
}
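The VERSION constant is the contract between the two sides: the client passes it to RPC.waitForProxy, and the server reports its own version through getProtocolVersion (see MyBiz below). If the two numbers disagree, the Hadoop 1.x RPC layer rejects the connection with a version-mismatch error, so the constant should be bumped whenever the interface changes incompatibly.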

7. MyBiz.java

package rpc;

import java.io.IOException;

public class MyBiz implements MyBizable {
    /* (non-Javadoc)
     * @see rpc.MyBizable#hello(java.lang.String)
     */
    @Override
    public String hello(String name) {
        System.out.println("I was called");
        return "hello " + name;
    }

    /* (non-Javadoc)
     * @see rpc.MyBizable#getProtocolVersion(java.lang.String, long)
     */
    @Override
    public long getProtocolVersion(String arg0, long arg1) throws IOException {
        return VERSION;
    }
}
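For a quick smoke test, the server and client can also run in a single JVM: start the server, then talk to it through a proxy exactly as MyClient does. A hypothetical sketch (the class name RpcSmokeTest is invented; it assumes the rpc classes above are on the classpath of a Hadoop 1.x project):

package rpc;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

public class RpcSmokeTest {
    public static void main(String[] args) throws Exception {
        // Start the server exactly as MyServer does.
        final RPC.Server server = RPC.getServer(new MyBiz(), MyServer.ADDRESS, MyServer.PORT,
                new Configuration());
        server.start();

        // Call it through a proxy, as MyClient does.
        MyBizable proxy = (MyBizable) RPC.waitForProxy(MyBizable.class, MyBizable.VERSION,
                new InetSocketAddress(MyServer.ADDRESS, MyServer.PORT), new Configuration());
        System.out.println(proxy.hello("world")); // expected output: hello world
        RPC.stopProxy(proxy);
        server.stop();
    }
}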
