(8) Implementing a Simple Hadoop-Based Network Disk Application, Part 4

File structure
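
The original post shows the project layout as a screenshot. Based on the code in this part, the web project is laid out roughly as follows (the project name and any folders not shown in the code below are assumptions):

HadoopNetDisk/                  (project name assumed)
    src/
        com/controller/         UploadServlet, DownloadServlet, DeleteFileServlet,
                                DocumentServlet, LogoutServlet (this part)
        com/model/              HdfsDAO (the HDFS access layer from part 2)
    WebContent/
        head.jsp, login.jsp, index.jsp, document.jsp
        assets/                 bootmetro CSS/JS (from part 3)
        WEB-INF/web.xml         servlet mappings and the file-upload context parameter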

(1) index.jsp: the home page

index.jsp

 <%@ include file="head.jsp"%>
 <%@ page language="java" contentType="text/html; charset=UTF-8"
    pageEncoding="UTF-8"%>
<%@page import="org.apache.hadoop.fs.FileStatus"%>    

<body style="text-align:center;margin-bottom:100px;">
		 <div class="navbar" >
	     <div class="navbar-inner">
	       <a class="brand" href="#" style="margin-left:200px;">网盘</a>
	       <ul class="nav">

	         <li><a href="LogoutServlet">退出</a></li>

	       </ul>
	     </div>
	   </div>

	   <div style="margin:0px auto; text-align:left;width:1200px; height:50px;">
	   <form class="form-inline"  method="POST"  enctype="MULTIPART/FORM-DATA"   action="UploadServlet" >
		   <div style="line-height:50px;float:left;">
		  	  <input type="submit" name="submit" value="上传文件"  />
		   </div>
		   <div style="line-height:50px;float:left;">
		  	  <input type="file" name="file1" size="30"/>
		   </div>
	   </form>

	   </div>

      	<div  style="margin:0px auto; width:1200px;height:500px; background:#fff">

	      	<table class="table table-hover" style="width:1000px;margin-left:100px;">
	      		<tr style=" border-bottom:2px solid #ddd">
	      			<td>文件名</td><td style="width:100px;">类型</td><td style="width:100px;">大小(KB)</td><td style="width:100px;">操作</td><td style="width:100px;">操作</td>
	      		</tr>
	      		<%

			FileStatus[] list = (FileStatus[])request.getAttribute("list");
			if(list != null)
			for (int i=0; i<list.length; i++) {
				%>
					<tr style="border-bottom:1px solid #eee">
					<%
						if(list[i].isDir())
						{
							out.print("<td> <a href=\"DocumentServlet?filePath="+list[i].getPath()+"\">"+list[i].getPath().getName()+"</a></td>");
						}else{
							out.print("<td>"+list[i].getPath().getName()+"</td>");
						}
					%>
 						<td><%= (list[i].isDir()?"目录":"文件") %></td>
						<td><%= list[i].getLen()/1024%></td>
						<td><a href="DeleteFileServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">x</a></td>
						<td><a href="DownloadServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">下载</a></td>

			 		</tr >

			<%
			}
			%>
	      	</table>

      	</div>

</body>
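
index.jsp submits the upload form to UploadServlet and links to LogoutServlet, DeleteFileServlet, DownloadServlet and DocumentServlet by name, so each servlet needs a matching entry in WEB-INF/web.xml (the original post does not show this file). A minimal sketch for one of them, with the other four mapped the same way:

<servlet>
    <servlet-name>UploadServlet</servlet-name>
    <servlet-class>com.controller.UploadServlet</servlet-class>
</servlet>
<servlet-mapping>
    <servlet-name>UploadServlet</servlet-name>
    <url-pattern>/UploadServlet</url-pattern>
</servlet-mapping>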
 

(2) document.jsp

 <%@ include file="head.jsp"%>
 <%@ page language="java" contentType="text/html; charset=UTF-8"
    pageEncoding="UTF-8"%>
<%@page import="org.apache.hadoop.fs.FileStatus"%>  

<body style="text-align:center;margin-bottom:100px;">
	 <div class="navbar" >
	     <div class="navbar-inner">
	       <a class="brand" href="#" style="margin-left:200px;">网盘</a>
	       <ul class="nav">
	         <li class="active"><a href="#">首页</a></li>
	         <li><a href="#">Link</a></li>
	         <li><a href="#">Link</a></li>
	       </ul>
	     </div>
	   </div>

	   <div style="margin:0px auto; text-align:left;width:1200px; height:50px;">
	   <form class="form-inline"  method="POST"  enctype="MULTIPART/FORM-DATA"   action="UploadServlet" >
		   <div style="line-height:50px;float:left;">
		  	  <input type="submit" name="submit" value="上传文件"  />
		   </div>
		   <div style="line-height:50px;float:left;">
		  	  <input type="file" name="file1" size="30"/>
		   </div>
	   </form>

	   </div>

	<div  style="margin:0px auto; width:1200px;height:500px; background:#fff">
		<table  class="table table-hover"  style="width:1000px;margin-left:100px;">
				<tr><td>文件名</td><td>属性</td><td>大小(KB)</td><td>操作</td><td>操作</td></tr>
				<%

			FileStatus[] list = (FileStatus[])request.getAttribute("documentList");
			if(list != null)
			for (int i=0; i<list.length; i++) {
				%>
					<tr  style=" border-bottom:2px solid #ddd">
					<%
						if(list[i].isDir())
						{
							out.print("<td><a href=\"DocumentServlet?filePath="+list[i].getPath()+"\">"+list[i].getPath().getName()+"</a></td>");
						}else{
							out.print("<td>"+list[i].getPath().getName()+"</td>");
						}
					%>
 						<td><%= (list[i].isDir()?"目录":"文件") %></td>
						<td><%= list[i].getLen()/1024%></td>
						<td><a href="DeleteFileServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">x</a></td>
						<td><a href="DownloadServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(),"GB2312") %>">下载</a></td>
			 		</tr>

			<%
			}
			%>
			</table>
</div>
</body>
</html>

(3) DeleteFileServlet

package com.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.model.HdfsDAO;

/**
 * Servlet implementation class DeleteFileServlet
 */
public class DeleteFileServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

	    String filePath = new String(request.getParameter("filePath").getBytes("ISO-8859-1"),"GB2312");

 		JobConf conf = HdfsDAO.config();
        HdfsDAO hdfs = new HdfsDAO(conf);
        hdfs.rmr(filePath);
        System.out.println("===="+filePath+"====");
        // Refresh the listing shown on index.jsp. Note that this lists /user/root/,
        // while UploadServlet lists "/" + username; adjust to your directory layout.
        FileStatus[] list = hdfs.ls("/user/root/");
        request.setAttribute("list",list);
		request.getRequestDispatcher("index.jsp").forward(request,response);

	}

	/**
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		this.doGet(request, response);
	}

}
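
All of the servlets in this part delegate HDFS access to com.model.HdfsDAO, which was built in part 2 of this series. As a reminder, a minimal sketch of the methods used here (config(), ls(), rmr(), copyFile(), download()) is shown below; the namenode address is a placeholder and should match your own cluster from part 2:

package com.model;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public class HdfsDAO {

    // HDFS namenode address; replace with your own cluster address
    private static final String HDFS = "hdfs://192.168.1.104:9000/";

    private String hdfsPath;
    private Configuration conf;

    public HdfsDAO(Configuration conf) {
        this.hdfsPath = HDFS;
        this.conf = conf;
    }

    // Configuration object handed to the servlets; the explicit HDFS URI above
    // is what actually points the DAO at the cluster
    public static JobConf config() {
        JobConf conf = new JobConf(HdfsDAO.class);
        conf.setJobName("HdfsDAO");
        return conf;
    }

    // List the files and directories under an HDFS folder
    public FileStatus[] ls(String folder) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(new Path(folder));
        fs.close();
        return list;
    }

    // Recursively delete an HDFS file or directory
    public void rmr(String folder) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.delete(new Path(folder), true);
        fs.close();
    }

    // Copy a local file on the Tomcat server into HDFS
    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        fs.close();
    }

    // Copy an HDFS file to a local directory on the Tomcat server
    public void download(String remote, String local) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyToLocalFile(new Path(remote), new Path(local));
        fs.close();
    }
}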

(4) UploadServlet

package com.controller;

import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;

import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.model.HdfsDAO;

/**
 * Servlet implementation class UploadServlet
 */
public class UploadServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		this.doPost(request, response);
	}

	/**
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		   request.setCharacterEncoding("UTF-8");
		   File file;
		   int maxFileSize = 50 * 1024 * 1024;  // 50 MB
		   int maxMemSize = 50 * 1024 * 1024;   // 50 MB
		   ServletContext context = getServletContext();
		   // Local staging directory on the Tomcat server, configured in web.xml
		   String filePath = context.getInitParameter("file-upload");
		   System.out.println("source file path:" + filePath);
		   // Verify that the request is a multipart upload
		   String contentType = request.getContentType();
		   if (contentType.indexOf("multipart/form-data") >= 0) {

		      DiskFileItemFactory factory = new DiskFileItemFactory();
		      // Maximum file size that is kept in memory
		      factory.setSizeThreshold(maxMemSize);
		      // Files larger than maxMemSize are spooled to this temporary directory
		      factory.setRepository(new File("c:\\temp"));

		      // Create a new file upload handler
		      ServletFileUpload upload = new ServletFileUpload(factory);
		      // Maximum allowed size of an uploaded file
		      upload.setSizeMax(maxFileSize);
		      try {
		         // Parse the request into file items
		         List fileItems = upload.parseRequest(request);

		         // Process the uploaded files
		         Iterator i = fileItems.iterator();

		         System.out.println("begin to upload file to tomcat server");
		         while (i.hasNext()) {
		            FileItem fi = (FileItem) i.next();
		            if (!fi.isFormField()) {
		               // Metadata of the uploaded file
		               String fieldName = fi.getFieldName();
		               String fileName = fi.getName();
		               // Strip any client-side directory prefix (IE submits the full path)
		               String fn = fileName.substring(fileName.lastIndexOf("\\") + 1);
		               System.out.println(fn);
		               boolean isInMemory = fi.isInMemory();
		               long sizeInBytes = fi.getSize();
		               // Write the file to the local staging directory on the Tomcat server
		               if (fileName.lastIndexOf("\\") >= 0) {
		                  file = new File(filePath, fileName.substring(fileName.lastIndexOf("\\")));
		               } else {
		                  file = new File(filePath, fileName.substring(fileName.lastIndexOf("\\") + 1));
		               }
		               fi.write(file);
		               System.out.println("upload file to tomcat server success!");

		               System.out.println("begin to upload file to hadoop hdfs");
		               // Copy the staged file from the Tomcat server into the current user's HDFS directory
		               String username = (String) request.getSession().getAttribute("username");
		               JobConf conf = HdfsDAO.config();
		               HdfsDAO hdfs = new HdfsDAO(conf);
		               hdfs.copyFile(filePath + "\\" + fn, "/" + username + "/" + fn);
		               System.out.println("upload file to hadoop hdfs success!");

		               System.out.println("username-----" + username);
		               // Refresh the listing of the user's HDFS directory and return to the home page
		               // (the form submits a single file, so forwarding inside the loop is acceptable here)
		               FileStatus[] list = hdfs.ls("/" + username);
		               request.setAttribute("list", list);
		               request.getRequestDispatcher("index.jsp").forward(request, response);
		            }
		         }
		      } catch (Exception ex) {
		         System.out.println(ex);
		      }
		   } else {
		      System.out.println("No file uploaded");
		   }

	}

}
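
UploadServlet reads the local staging directory from the file-upload context parameter, so WEB-INF/web.xml also needs an entry like the one below (the path is only an example; any directory that Tomcat can write to will do):

<context-param>
    <param-name>file-upload</param-name>
    <param-value>c:\upload</param-value>
</context-param>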

(5) DownloadServlet

package com.controller;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.model.HdfsDAO;

/**
 * Servlet implementation class DownloadServlet
 */
public class DownloadServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException  {
		String local = "C:/";
		String filePath = new String(request.getParameter("filePath").getBytes("ISO-8859-1"),"GB2312");
		System.out.println(filePath);
		JobConf conf = HdfsDAO.config();
        HdfsDAO hdfs = new HdfsDAO(conf);
        hdfs.download(filePath, local);

        FileStatus[] list = hdfs.ls("/user/root/");
        request.setAttribute("list",list);
        request.getRequestDispatcher("index.jsp").forward(request,response);
	}

	/**
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		this.doGet(request, response);
	}

}
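
As written, DownloadServlet only copies the HDFS file to C:/ on the machine running Tomcat and then redisplays the listing; nothing is returned to the browser. The unused java.io and ServletOutputStream imports hint at the missing piece. One possible sketch for streaming the local copy back to the client from inside doGet(), right after hdfs.download() (the file name handling is illustrative, and if you stream the response you can no longer forward to index.jsp afterwards):

        // Stream the freshly downloaded local copy back to the browser
        String fileName = new org.apache.hadoop.fs.Path(filePath).getName();
        File localFile = new File(local, fileName);

        response.setContentType("application/octet-stream");
        response.setHeader("Content-Disposition",
                "attachment; filename=\"" + java.net.URLEncoder.encode(fileName, "UTF-8") + "\"");

        InputStream in = new BufferedInputStream(new FileInputStream(localFile));
        ServletOutputStream out = response.getOutputStream();
        byte[] buffer = new byte[8192];
        int length;
        while ((length = in.read(buffer)) != -1) {
            out.write(buffer, 0, length);
        }
        in.close();
        out.flush();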

(6) DocumentServlet

package com.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.model.HdfsDAO;

/**
 * Servlet implementation class DocumentServlet
 */
public class DocumentServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		 String filePath = new String(request.getParameter("filePath").getBytes("ISO-8859-1"),"GB2312");
		 JobConf conf = HdfsDAO.config();
	     HdfsDAO hdfs = new HdfsDAO(conf);
	     FileStatus[] documentList = hdfs.ls(filePath);
	     request.setAttribute("documentList",documentList);
		 request.getRequestDispatcher("document.jsp").forward(request,response);
	}

	/**
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		this.doGet(request, response);
	}

}

(7) LogoutServlet

package com.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

/**
 * Servlet implementation class LogoutServlet
 */
public class LogoutServlet extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		HttpSession session =  request.getSession();
		session.removeAttribute("username");
		request.getRequestDispatcher("login.jsp").forward(request, response);
	}

	/**
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		 this.doGet(request, response);
	}

}
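
LogoutServlet only removes the username attribute and forwards back to login.jsp. A common alternative, sketched below, is to invalidate the whole session and redirect, so the browser address bar no longer points at LogoutServlet:

        // Alternative body for doGet(): drop the entire session and redirect
        request.getSession().invalidate();
        response.sendRedirect("login.jsp");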

This completes the simple Hadoop-based network disk application. If you want to make it more like a real network disk, you can spend more time implementing the remaining features.
