示例代码如下:

package h5.all.demo;

import java.io.File;
import java.io.FileReader;
import java.io.LineNumberReader;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

/**
 * Appends rows from CSV-style text files (one file per dataset, header row skipped)
 * into compound-type datasets of a single HDF5 file, extending each dataset one row
 * at a time via {@code H5Dset_extent}.
 *
 * <p>Flow: {@link #main} -&gt; {@link #readLocalExcel} (scans G:/hdf5_write_txt) -&gt;
 * {@link #readAllPath} per text file -&gt; {@link #CreateDataset} (first row, creates
 * file/dataset) or {@link #extendUnlimited} (subsequent rows, grows the dataset).
 *
 * <p>NOTE(review): not thread-safe — row buffers and HDF5 handles are mutable statics.
 */
public class Example {

    /** Target .h5 file the text data is written into. */
    private static String FILENAME = "G:/txt_write_hdf5/market_20090104.h5";
    /** Root group used when iterating dataset names. */
    private static String PATH = "/";
    /** Running row counter for the text file currently being read. */
    private static int count = 0;
    /** HDF5 file handle shared by the write methods (-1 = not open). */
    private static int file_id = -1;
    /** HDF5 dataset handle shared by the write methods (-1 = not open). */
    private static int dataset_id = -1;
    /** Fallback row count used when querying an existing dataset's extent. */
    public static final long DIM0 = 100000;
    private static int DIMC = 100000;
    /** Rows appended so far in the current extend cycle (reset per text file). */
    private static int DIME = 0;
    /** Current dataspace size (rank 1). */
    private static long[] dims = { DIMC };
    /** Extended dataspace size passed to H5Dset_extent. */
    private static long[] extdims = { DIME };
    private static final int CHUNK_X = 4;
    private static final int CHUNK_Y = 4;
    private static final int NDIMS = 1;
    /** Rank of every dataset (one dimension of compound records). */
    private static final int RANK = 1;
    protected static final int INTEGERSIZE = 4;
    protected static final int LONGSIZE = 8;
    protected static final int FLOATSIZE = 4;
    protected static final int DOUBLESIZE = 8;
    /** Fixed byte length of the string member inside each compound record. */
    protected final static int MAXSTRINGSIZE = 80;
    /** Raw backing buffer handed to H5Dwrite. */
    private static byte[] dset_data;
    /** ByteBuffer view over {@link #dset_data}, native byte order. */
    private static ByteBuffer outBuf;

    /**
     * Describes the compound record layout: member names, HDF5 memory/file types,
     * and per-member storage sizes. Also hosts line-count helpers for the text
     * source and the HDF5 target.
     */
    static class Sensor_Datatype {
        /** Number of members (columns) in the compound type. */
        static int numberMembers = 5;
        static int[] memberDims = { 1, 1, 1, 1, 1 };
        /** Field names of the compound members. */
        static String[] memberNames =
                { "trading_day", "updatetime", "instrument_id", "gap_number", "reserve" };
        // In-memory member types. The chosen type must be at least as wide as the
        // stored value or the data would overflow on write.
        static long[] memberMemTypes = {
                HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_NATIVE_LONG,
                HDF5Constants.H5T_NATIVE_FLOAT, HDF5Constants.H5T_NATIVE_DOUBLE,
                HDF5Constants.H5T_C_S1 };
        // On-disk (big-endian standard) member types, matching memberMemTypes.
        static long[] memberFileTypes = {
                HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_STD_I64BE,
                HDF5Constants.H5T_IEEE_F32BE, HDF5Constants.H5T_IEEE_F64BE,
                HDF5Constants.H5T_C_S1 };
        /** Storage size in bytes of each member. */
        static int[] memberStorage =
                { INTEGERSIZE, LONGSIZE, FLOATSIZE, DOUBLESIZE, MAXSTRINGSIZE };

        /** Total storage for {@code count} records (uses the outer row counter). */
        static long getTotalDataSize() {
            long data_size = 0;
            for (int indx = 0; indx < numberMembers; indx++)
                data_size += memberStorage[indx] * memberDims[indx];
            return count * data_size;
        }

        /** Storage size in bytes of one compound record. */
        static long getDataSize() {
            long data_size = 0;
            for (int indx = 0; indx < numberMembers; indx++)
                data_size += memberStorage[indx] * memberDims[indx];
            return data_size;
        }

        /** Byte offset of member {@code memberItem} inside one record. */
        static int getOffset(int memberItem) {
            int data_offset = 0;
            for (int indx = 0; indx < memberItem; indx++)
                data_offset += memberStorage[indx];
            return data_offset;
        }

        /**
         * Counts data rows in a text file.
         *
         * @param path text file path
         * @return number of lines minus one (starts at -1 so the header row is
         *         excluded); -1 if the file is missing or unreadable
         */
        public static int readTxtLineNum(String path) {
            int dataCount = -1; // start at -1 to cancel out the header row
            try {
                File file = new File(path);
                if (file.exists()) {
                    FileReader fr = new FileReader(file);
                    LineNumberReader lnr = new LineNumberReader(fr);
                    // FIX: close in finally so the reader is not leaked when
                    // readLine throws (original closed only on the success path).
                    try {
                        while (null != lnr.readLine()) {
                            dataCount++;
                        }
                    } finally {
                        lnr.close();
                    }
                } else {
                    System.out.println("文件不存在!");
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return dataCount;
        }

        /**
         * Returns the current row count (first-dimension extent) of a dataset in
         * an existing HDF5 file. Falls back to {@link #DIM0} if any step fails.
         *
         * @param FILENAME    HDF5 file path
         * @param DATASETNAME dataset name under the root group
         * @return extent of dimension 0
         */
        public static long readH5LineNum(String FILENAME, String DATASETNAME) {
            int file_id = -1;
            int dataspace_id = -1;
            int dataset_id = -1;
            long[] dims = { DIM0 };
            // Open an existing file read-only.
            try {
                file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY,
                        HDF5Constants.H5P_DEFAULT);
            } catch (Exception e) {
                e.printStackTrace();
            }
            // Open the dataset.
            try {
                if (file_id >= 0)
                    dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
            } catch (Exception e) {
                e.printStackTrace();
            }
            // Get its dataspace.
            try {
                if (dataset_id >= 0)
                    dataspace_id = H5.H5Dget_space(dataset_id);
            } catch (Exception e) {
                e.printStackTrace();
            }
            // Query the current extent into dims.
            try {
                if (dataspace_id >= 0)
                    H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
            } catch (Exception e) {
                e.printStackTrace();
            }
            // Release handles (each independently, best effort).
            try { if (dataset_id >= 0) H5.H5Dclose(dataset_id); } catch (Exception e) { e.printStackTrace(); }
            try { if (dataspace_id >= 0) H5.H5Sclose(dataspace_id); } catch (Exception e) { e.printStackTrace(); }
            try { if (file_id >= 0) H5.H5Fclose(file_id); } catch (Exception e) { e.printStackTrace(); }
            return dims[0];
        }
    }

    /**
     * One compound record: a single parsed text row, plus helpers to serialize
     * it into the write buffer and to enumerate datasets in the target file.
     */
    static class Sensor {
        public Integer trading_day;
        public Long updatetime;
        public Float instrument_id;
        public Double gap_number;
        public String reserve;

        public Sensor() {
        }

        public Sensor(Integer trading_day, Long updatetime, Float instrument_id,
                Double gap_number, String reserve) {
            this.trading_day = trading_day;
            this.updatetime = updatetime;
            this.instrument_id = instrument_id;
            this.gap_number = gap_number;
            this.reserve = reserve;
        }

        public Integer getTrading_day() {
            return trading_day;
        }

        public void setTrading_day(Integer trading_day) {
            this.trading_day = trading_day;
        }

        public Long getUpdatetime() {
            return updatetime;
        }

        public void setUpdatetime(Long updatetime) {
            this.updatetime = updatetime;
        }

        public Float getInstrument_id() {
            return instrument_id;
        }

        public void setInstrument_id(Float instrument_id) {
            this.instrument_id = instrument_id;
        }

        public Double getGap_number() {
            return gap_number;
        }

        public void setGap_number(Double gap_number) {
            this.gap_number = gap_number;
        }

        public String getReserve() {
            return reserve;
        }

        public void setReserve(String reserve) {
            this.reserve = reserve;
        }

        /**
         * Iterates every object under {@code PATH} in the given HDF5 file and
         * returns their names; prints a line for groups/datatypes/unknown objects.
         *
         * @return object names, or a single-element error marker on failure
         */
        public static String[] do_iterate(String FILENAME, String PATH) {
            int file_id = -1;
            // Open the file read-only with default properties.
            try {
                file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY,
                        HDF5Constants.H5P_DEFAULT);
            } catch (Exception e) {
                e.printStackTrace();
            }
            try {
                if (file_id >= 0) {
                    int count = (int) H5.H5Gn_members(file_id, PATH);
                    String[] oname = new String[count];
                    int[] otype = new int[count];
                    int[] ltype = new int[count];
                    long[] orefs = new long[count];
                    H5.H5Gget_obj_info_all(file_id, PATH, oname, otype, ltype, orefs,
                            HDF5Constants.H5_INDEX_NAME);
                    // Report each object's type; datasets are silently accepted.
                    for (int indx = 0; indx < otype.length; indx++) {
                        switch (H5O_type.get(otype[indx])) {
                        case H5O_TYPE_GROUP:
                            System.out.print("  Group: " + oname[indx] + "," + oname.length + " ");
                            break;
                        case H5O_TYPE_DATASET:
                            break;
                        case H5O_TYPE_NAMED_DATATYPE:
                            System.out.print("  Datatype: " + oname[indx] + "," + oname.length + " ");
                            break;
                        default:
                            System.out.print("  Unknown: " + oname[indx] + "," + oname.length + " ");
                        }
                    }
                    // Close the file before returning the names.
                    try {
                        if (file_id >= 0)
                            H5.H5Fclose(file_id);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    return oname;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return new String[]{"数据集遍历出错"};
        }

        /**
         * Serializes this record into {@code databuf} at absolute byte offset
         * {@code dbposition}, member by member, padding the string member with
         * NUL bytes up to {@link #MAXSTRINGSIZE}.
         */
        void writeBuffer(ByteBuffer databuf, int dbposition) {
            databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), this.trading_day);
            databuf.putLong(dbposition + Sensor_Datatype.getOffset(1), this.updatetime);
            databuf.putFloat(dbposition + Sensor_Datatype.getOffset(2), this.instrument_id);
            databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), this.gap_number);
            byte[] t_reserve = this.reserve.getBytes(Charset.forName("UTF-8"));
            // Truncate to the fixed field width if necessary.
            int a_reserve = (t_reserve.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : t_reserve.length;
            for (int ndx = 0; ndx < a_reserve; ndx++)
                databuf.put(dbposition + Sensor_Datatype.getOffset(4) + ndx, t_reserve[ndx]);
            // FIX: the original wrote every padding byte to the same offset
            // (… + a_reserve), leaving the remainder of the field uninitialized;
            // index with ndx so the whole tail is zero-filled.
            for (int ndx = a_reserve; ndx < MAXSTRINGSIZE; ndx++)
                databuf.put(dbposition + Sensor_Datatype.getOffset(4) + ndx, (byte) 0);
        }

        @Override
        public String toString() {
            return "Sensor [trading_day=" + trading_day + ", updatetime=" + updatetime
                    + ", instrument_id=" + instrument_id + ", gap_number=" + gap_number
                    + ", reserve=" + reserve + "]";
        }
    }

    /** HDF5 object types, keyed by the native type code. */
    enum H5O_type {
        H5O_TYPE_UNKNOWN(-1),        // Unknown object type
        H5O_TYPE_GROUP(0),           // Object is a group
        H5O_TYPE_DATASET(1),         // Object is a dataset
        H5O_TYPE_NAMED_DATATYPE(2),  // Object is a named data type
        H5O_TYPE_NTYPES(3);          // Number of different object types

        private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
        static {
            for (H5O_type s : EnumSet.allOf(H5O_type.class))
                lookup.put(s.getCode(), s);
        }

        private int code;

        H5O_type(int layout_type) {
            this.code = layout_type;
        }

        public int getCode() {
            return this.code;
        }

        /** @return the enum constant for {@code code}, or null if unmapped. */
        public static H5O_type get(int code) {
            return lookup.get(code);
        }
    }

    /**
     * Creates (or opens) the HDF5 file and the named chunked, extendable dataset,
     * then writes the row buffer to it.
     *
     * @param object_data record whose bytes are written into {@link #outBuf}
     * @param DATASETNAME dataset name under the root group
     * @param timeCount   zero-based row index inside the current buffer
     * @param flag        true if the .h5 file already exists
     * @param flagDset    true if the dataset already exists
     */
    public static void CreateDataset(Sensor object_data, String DATASETNAME, int timeCount,
            boolean flag, boolean flagDset) {
        int strtype_id = -1;
        int memtype_id = -1;
        int filetype_id = -1;
        int dataspace_id = -1;
        int dcpl_id = -1;
        long[] chunk_dims = { CHUNK_X, CHUNK_Y };
        long[] maxdims = { HDF5Constants.H5S_UNLIMITED };
        // Open the file if it exists (or is already held open), otherwise create it.
        try {
            if (flag || file_id >= 0) {
                file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR,
                        HDF5Constants.H5P_DEFAULT);
            } else {
                file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC,
                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Create the fixed-length string datatype for the "reserve" member.
        try {
            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
            if (strtype_id >= 0)
                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Create the compound datatype for memory.
        try {
            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
            if (memtype_id >= 0) {
                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
                    int type_id = (int) Sensor_Datatype.memberMemTypes[indx];
                    if (type_id == HDF5Constants.H5T_C_S1)
                        type_id = strtype_id;
                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
                            Sensor_Datatype.getOffset(indx), type_id);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Create the compound datatype for the file. Because the standard types
        // used on disk may differ in size from the native types, each member's
        // offset is computed manually from the storage table.
        try {
            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
            if (filetype_id >= 0) {
                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
                    int type_id = (int) Sensor_Datatype.memberFileTypes[indx];
                    if (type_id == HDF5Constants.H5T_C_S1)
                        type_id = strtype_id;
                    H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
                            Sensor_Datatype.getOffset(indx), type_id);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Create the dataspace; maxdims of H5S_UNLIMITED allows later extension.
        try {
            dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Dataset creation property list (chunking is required for extendable sets).
        try {
            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (dcpl_id >= 0)
                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Open the dataset if it exists, otherwise create the unlimited dataset.
        try {
            if (flagDset || dataset_id >= 0) {
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
            } else if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0)
                    && (dataset_id < 0)) {
                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id,
                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Serialize the record into the shared buffer, then write it out.
        object_data.writeBuffer(outBuf, timeCount * (int) Sensor_Datatype.getDataSize());
        try {
            if ((dataset_id >= 0) && (memtype_id >= 0))
                H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Release all handles, best effort, mirroring the acquisition order.
        try { if (dataset_id >= 0) H5.H5Dclose(dataset_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (dataspace_id >= 0) H5.H5Sclose(dataspace_id); } catch (Exception e) { e.printStackTrace(); }
        // FIX: the original never closed dcpl_id, leaking a property list per call.
        try { if (dcpl_id >= 0) H5.H5Pclose(dcpl_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (filetype_id >= 0) H5.H5Tclose(filetype_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (memtype_id >= 0) H5.H5Tclose(memtype_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (strtype_id >= 0) H5.H5Tclose(strtype_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (file_id >= 0) H5.H5Fclose(file_id); } catch (Exception e) { e.printStackTrace(); }
    }

    /**
     * Extends the existing dataset to {@link #extdims} and writes the new row
     * into the newly added portion.
     *
     * @param object_data record to append
     * @param DATASETNAME dataset name under the root group
     * @param timeCount   zero-based row index inside the current buffer
     * @param flag        true if the .h5 file exists (unused here, kept for parity)
     * @param flagDset    true if the dataset exists (unused here, kept for parity)
     * @param h5Line      current dataset row count before extension (unused here)
     */
    private static void extendUnlimited(Sensor object_data, String DATASETNAME, int timeCount,
            boolean flag, boolean flagDset, int h5Line) {
        int strtype_id = -1;
        int memtype_id = -1;
        int filetype_id = -1;
        int dataspace_id = -1;
        long[] start = { 0, 0 };
        long[] count = new long[2];
        // Open the existing file read-write.
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Open the existing dataset.
        try {
            if (file_id >= 0)
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Fixed-length string datatype for the "reserve" member.
        try {
            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
            if (strtype_id >= 0)
                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Compound datatype for memory.
        try {
            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
            if (memtype_id >= 0) {
                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
                    int type_id = (int) Sensor_Datatype.memberMemTypes[indx];
                    if (type_id == HDF5Constants.H5T_C_S1)
                        type_id = strtype_id;
                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
                            Sensor_Datatype.getOffset(indx), type_id);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Compound datatype for the file; offsets computed manually because the
        // on-disk standard types may differ in size from the native types.
        try {
            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
            if (filetype_id >= 0) {
                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
                    int type_id = (int) Sensor_Datatype.memberFileTypes[indx];
                    if (type_id == HDF5Constants.H5T_C_S1)
                        type_id = strtype_id;
                    H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
                            Sensor_Datatype.getOffset(indx), type_id);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Extend the dataset to the new row count.
        try {
            if (dataset_id >= 0)
                H5.H5Dset_extent(dataset_id, extdims);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Retrieve the dataspace of the newly extended dataset.
        try {
            if (dataset_id >= 0)
                dataspace_id = H5.H5Dget_space(dataset_id);
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            if (dataspace_id >= 0) {
                // Select everything, then subtract a hyperslab covering the
                // original extent so only the newly added portion stays selected.
                H5.H5Sselect_all(dataspace_id);
                count[0] = dims[0];
                // NOTE(review): the dataset is rank 1 but start/count have two
                // elements and count[1] is hard-coded to 65 — looks suspect;
                // confirm against the HDF5 hyperslab API before relying on it.
                count[1] = 65;
                H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB,
                        start, null, count, null);
                object_data.writeBuffer(outBuf, timeCount * (int) Sensor_Datatype.getDataSize());
                // Write the record to the selected (new) portion of the dataset.
                if (dataset_id >= 0)
                    H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, dataspace_id,
                            HDF5Constants.H5P_DEFAULT, dset_data);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Release all handles, best effort.
        try { if (dataset_id >= 0) H5.H5Dclose(dataset_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (dataspace_id >= 0) H5.H5Sclose(dataspace_id); } catch (Exception e) { e.printStackTrace(); }
        // FIX: the original leaked the three datatype handles below on every call
        // (CreateDataset closes its own — made consistent here).
        try { if (filetype_id >= 0) H5.H5Tclose(filetype_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (memtype_id >= 0) H5.H5Tclose(memtype_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (strtype_id >= 0) H5.H5Tclose(strtype_id); } catch (Exception e) { e.printStackTrace(); }
        try { if (file_id >= 0) H5.H5Fclose(file_id); } catch (Exception e) { e.printStackTrace(); }
    }

    /**
     * Scans G:/hdf5_write_txt and feeds every file to {@link #readAllPath},
     * deriving each dataset's name from the file name (extension stripped).
     */
    public static void readLocalExcel() {
        String url = "G:/hdf5_write_txt";
        File file = new File(url);
        File[] files = file.listFiles();
        String dsetString = "";
        if (null != files) {
            for (File f : files) {
                String path = "";
                String filePath = f.getPath();
                // Split on backslashes and rebuild with forward slashes.
                String[] fpStr = filePath.split("\\\\");
                for (int i = 0; i < fpStr.length; i++) {
                    // Dataset name = file name without its extension.
                    dsetString = "" + fpStr[fpStr.length - 1].split("\\.")[0];
                    if (i != fpStr.length - 1) {
                        path += fpStr[i] + "/";
                    } else {
                        path += fpStr[i];
                    }
                }
                Example.readAllPath(path, dsetString);
            }
        }
    }

    /**
     * Reads one text file row by row (skipping the header) and appends each row
     * to the HDF5 dataset, creating the file/dataset on the first row if needed.
     *
     * @param path       text file path, e.g. G:/hdf5_write_txt/a0901.txt
     * @param dsetString dataset name derived from the file name
     * @return number of data rows read from the text file
     */
    public static int readAllPath(String path, String dsetString) {
        // Check whether the target file, and if so the dataset, already exist.
        boolean flagE = Example.judgeH5File(FILENAME);
        boolean flagDsetE = false;
        if (flagE) {
            flagDsetE = Example.judgeH5Dset(dsetString);
        }
        String line_record = null;
        count = 0; // reset the row counter for each text file
        int h5Line = 0;
        boolean first = false;
        try {
            RandomAccessFile raf = new RandomAccessFile(path, "r");
            int txtLine = Sensor_Datatype.readTxtLineNum(path); // rows in the text file
            if (flagE && flagDsetE) {
                // Both file and dataset exist: start from the current HDF5 row count.
                h5Line = (int) Sensor_Datatype.readH5LineNum(FILENAME, dsetString);
            }
            while (null != (line_record = raf.readLine())) {
                if (count != 0) { // skip the header row
                    // NOTE(review): readLine decodes bytes as Latin-1; this
                    // re-encode/decode round trip (UTF-8 bytes read as GBK) looks
                    // fragile for non-ASCII input — confirm the source encoding.
                    line_record = new String(line_record.getBytes("UTF-8"), "GBK");
                    Sensor sen = Example.parseRecord(line_record);
                    int timeCount = count - 1;
                    boolean flag = Example.judgeH5File(FILENAME);
                    boolean flagDset = false;
                    if (flag) {
                        flagDset = Example.judgeH5Dset(dsetString);
                    }
                    // File or dataset missing: create it with this first record.
                    if (!flag || !flagDset) {
                        dims[0] = 1;
                        dset_data = new byte[(int) dims[0] * (int) Sensor_Datatype.getDataSize()];
                        outBuf = ByteBuffer.wrap(dset_data).order(ByteOrder.nativeOrder());
                        Example.CreateDataset(sen, dsetString, timeCount, flag, flagDset);
                        first = true;
                    }
                    // File and dataset exist: append if the text has extra rows.
                    if (flag && flagDset) {
                        if (first) {
                            h5Line = (int) Sensor_Datatype.readH5LineNum(FILENAME, dsetString);
                        }
                        if ((h5Line < txtLine) && (DIME <= h5Line)) {
                            DIME += 1;
                            extdims[0] = h5Line + 1;
                            dims[0] = h5Line;
                            dset_data = new byte[(int) extdims[0]
                                    * (int) Sensor_Datatype.getDataSize()];
                            outBuf = ByteBuffer.wrap(dset_data).order(ByteOrder.nativeOrder());
                            // Each call grows the dataset by exactly one row.
                            Example.extendUnlimited(sen, dsetString, timeCount, flag, flagDset, h5Line);
                            if (DIME > h5Line && h5Line <= txtLine) {
                                h5Line = (int) Sensor_Datatype.readH5LineNum(FILENAME, dsetString);
                            }
                        }
                    }
                }
                count += 1;
            }
            // Reset shared state for the next file; subtract the header row.
            dataset_id = -1;
            count -= 1;
            DIME = 0;
            h5Line = 0;
            System.out.println("本次共读出数据" + count + "条");
            raf.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return count;
    }

    /**
     * Checks whether the .h5 file named by {@code path} already exists in its
     * parent directory (listing the directory rather than File.exists, so the
     * behavior with multiple files matches the original intent).
     *
     * @param path forward-slash path ending in the file name
     * @return true if a file with that name is present
     */
    public static boolean judgeH5File(String path) {
        boolean flag = false; // default: not present
        String[] pathStr = path.split("/");
        String fileH5Name = pathStr[pathStr.length - 1]; // target file name
        String url = ""; // parent directory path
        for (int i = 0; i < pathStr.length - 1; i++) {
            if (i != pathStr.length - 2) {
                url += pathStr[i] + "/";
            } else {
                url += pathStr[i];
            }
        }
        File file = new File(url);
        String[] fileName = file.list();
        // FIX: list() returns null when the directory does not exist; the
        // original dereferenced it unconditionally and threw NPE. Treat a
        // missing directory as "file not present".
        if (fileName != null) {
            for (int i = 0; i < fileName.length; i++) {
                if (fileName[i].equals(fileH5Name)) {
                    flag = true;
                }
            }
        }
        return flag;
    }

    /**
     * Checks whether a dataset with the given name exists under the root group
     * of {@link #FILENAME}.
     */
    public static boolean judgeH5Dset(String dsetString) {
        boolean flagDset = false; // default: not present
        String[] dsetName = Sensor.do_iterate(FILENAME, PATH);
        for (int i = 0; i < dsetName.length; i++) {
            if (dsetString.equals(dsetName[i])) {
                flagDset = true;
            }
        }
        return flagDset;
    }

    /**
     * Splits one comma-separated text row into a {@link Sensor} record.
     *
     * @param line_record one data row
     * @return populated record
     */
    public static Sensor parseRecord(String line_record) {
        Sensor sen = new Sensor();
        String[] fields = line_record.split(",");
        sen.setTrading_day(Integer.parseInt(fields[0].trim()));
        sen.setUpdatetime(Long.parseLong(fields[1].trim()));
        sen.setInstrument_id(Float.parseFloat(fields[2].trim()));
        sen.setGap_number(Double.parseDouble(fields[3].trim()));
        // NOTE(review): index 64 assumes every row has at least 65 columns —
        // confirm against the source file format.
        sen.setReserve(fields[64].trim());
        return sen;
    }

    public static void main(String[] args) {
        long timeS = System.currentTimeMillis();
        Example.readLocalExcel();
        long timeE = System.currentTimeMillis();
        // FIX: original printed timeS - timeE, which is always negative;
        // elapsed time is end minus start.
        System.out.println("最终执行时间为:" + (timeE - timeS));
    }
}

java实现hdf5表数据的动态逐条追加相关推荐

  1. java导出hbase表数据_通用MapReduce程序复制HBase表数据

    编写MR程序,让其可以适合大部分的HBase表数据导入到HBase表数据.其中包括可以设置版本数.可以设置输入表的列导入设置(选取其中某几列).可以设置输出表的列导出设置(选取其中某几列). 原始表t ...

  2. Java 实现Excel表数据的读取和写入 以及过程中可能遇到的问题

    问题1:Unable to recognize OLE stream格式的问题要可能是因为给的数据是2010年的数据表后缀为.xlsx,要先转化成2003版的后缀为.xls问题2:Warning: P ...

  3. Java form表单原理,动态表单及动态建表实现原理[Java编程]

    赞助商链接 本文"动态表单及动态建表实现原理[Java编程]"是由七道奇为您精心收集,来源于网络转载,文章版权归文章作者所有,本站不对其观点以及内容做任何评价,请读者自行判断,以下 ...

  4. JDBC使用Java反射万能查询Oracle表数据、插入数据——【JDBC编程】

    JDBC使用反射智能查询Oracle表数据 JDBC编程中使用反射技术,动态获取Oracle表数据 1.反射的概念 2.构建Oracle数据库连接方法,关闭方法--便于调用 静态常量类: Oracle ...

  5. eclipse给mysql修改表数据_Eclipse中java向数据库中添加数据,更新数据,删除数据...

    ASP.NET网页动态添加.更新或删除数据行 看过此篇 http://www.cnblogs.com/insus/p/3247935.html的网友,也许明白Insus.NET是怎样实现动态添加数据行 ...

  6. java多个数据库数据进行访问_通过Spring Boot配置动态数据源访问多个数据库的实现代码...

    之前写过一篇博客<Spring+Mybatis+Mysql搭建分布式数据库访问框架>描述如何通过Spring+Mybatis配置动态数据源访问多个数据库.但是之前的方案有一些限制(原博客中 ...

  7. AJAX for Java简单表数据查询实例

    AJAX for Java简单表数据查询实例<?XML:NAMESPACE PREFIX = O /> AJAX WebShop 3对Java开发具有良好的支持,同时也提供了各种层次的后台 ...

  8. java 解析word模板为xml, 动态填充数据到xml,最后输出word文档

    java 解析word模板为xml, 动态填充数据到xml,最后输出word文档 在项目中, 一开始是使用java Apache poi 根据word模板生成word报表, 后面发现框架有个低版本的p ...

  9. vue form表单提交动态数据

    项目vue-cli搭建 需求为:vue页面跳转至第三方的支付页面 操作流程为:点击确认按钮时得到得到第三方网页的url和参数信息,以form表单的post方式提交 尝试了N种方法,都不可以成功完成逻辑 ...

最新文章

  1. 关于mysql的“+0”操作
  2. 用nero刻录视频文件的时候不能添加文件
  3. Qt C++ 检测优盘插入或拔出
  4. MySQL 8.0 Server层最新架构详解
  5. Spring : Spring定义Bean的两种方式:<bean>和@Bean
  6. 2019年,有远见的程序员都在关注这些硬核公众号
  7. Leetcode 208:实现Trie(前缀树)
  8. JVM第四节:JVM 执行子程序
  9. head/tail命令
  10. c语言中如何进行开方和求一个数的几次方;
  11. centos 的 tar 命令
  12. 两个对象值相同(x.equals(y) == true),但却可有不同的hashCode,这句话对不对?
  13. 网络广告的12种计费方式
  14. F - Ubiquitous Religions
  15. ipa文件如何下载安装OR如何设置IPA文件下载链接
  16. vue的h函数_vue 中的h函数
  17. 小悦文件保险箱 - 一款保存私密文件的工具
  18. 未知usb设备(设备描述请求失败)_HomePod mini?电源线同样不可拆卸:但或能用USB-C移动电源供电...
  19. 使用jquery的bind来检测textarea,input等的文本变化
  20. 地图服务 WMS WFS WCS TMS

热门文章

  1. 为什么目前的串行比并行传输快
  2. 如何有效制服一个杠精
  3. scrapy爬取微信公众号内容,多管道储存,orm数据储存
  4. Zepto中文API
  5. 通信网基础缩略语集合
  6. 深度强化学习综述论文 A Brief Survey of Deep Reinforcement Learning
  7. Linux下的dd和cat
  8. 互联网快讯:中国联通推出5G视频热线;极米Z6X Pro、极米H3S持续热销;丰速运与云快充达成合作
  9. Android开发的单词本APP项目介绍及源码(大作业)
  10. CN基于词库的中文转拼音优质解决方案,单类单文件版,支持低版本PHP