Streaming Large Files

When working with large files over 60MB, multiple URLs will be provided so the attachment can be split into chunks and uploaded in parallel. Each chunk can hold up to 20MB of the file, so you must split the file into chunks before uploading one chunk to each URL. The following example illustrates how to split data into chunks.

package eosutils.streamlargefile;

import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;

import org.apache.commons.io.FileUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import eosutils.utils.EOSUtils;

/**
 * Demonstrates a multi-part ("chunked") upload of a large file: the file is
 * split into fixed-size 20MB chunks, each chunk is PUT in parallel to its
 * pre-assigned upload URL, and the upload is finalized with a commit POST.
 */
public class StreamLargeFile {

  /** Location of the sample input, relative to the project root (resolved via EOSUtils.f). */
  private static String resourcePath = "/src/main/resources/streamlargefile";
  /** Entire file content to upload; populated once in streamLargeFile, read by worker threads. */
  private static byte[] byteInputParam;
  /** Maximum bytes per chunk: 20MB (20 * 1024 * 1024), per the service contract. */
  private static final int sizeLimit = 20971520;

  /**
   * Entry point: loads the chunk-descriptor JSON from the resources folder and
   * starts the multi-part upload.
   */
  public static void main(String... args) throws Exception {
    String pathToFile = EOSUtils.f.getAbsolutePath() + resourcePath + File.separator + "largefiletest.json";
    // Decode with an explicit charset; the no-arg String(byte[]) ctor uses the
    // platform default, which differs across machines.
    String inputData = new String(Files.readAllBytes(Paths.get(pathToFile)), StandardCharsets.UTF_8);
    JSONObject jObj = new JSONObject(inputData);
    streamLargeFile(jObj);
  }

  /**
   * Splits the source file into chunks and uploads them concurrently, one task
   * per entry in {@code multiChunk.chunkList}, then commits the upload.
   *
   * @param jObj descriptor JSON; expected keys (per this code's reads):
   *             "filename", "multiChunk.chunkList" (array of objects with
   *             "uploadUrl" and "size"), and "multiChunk.commitUrl"
   * @throws Exception if reading the file, uploading, or committing fails
   */
  public static void streamLargeFile(JSONObject jObj) throws Exception {
    JSONArray jAr = jObj.getJSONObject("multiChunk").getJSONArray("chunkList");
    String filename = jObj.getString("filename");
    String commitURL = jObj.getJSONObject("multiChunk").getString("commitUrl");
    byteInputParam = EOSUtils.convertDocToByteArray(filename, resourcePath);

    ExecutorService executor = Executors.newFixedThreadPool(jAr.length());
    System.out.println("*****************************************************************");
    System.out.println("*************STARTING MULTI PART UPLOAD*************");
    System.out.println("*****************************************************************");

    // Part numbers are 1-based; part p starts at byte offset (p-1) * sizeLimit.
    for (int part = 0; part < jAr.length(); part++) {
      executor.submit(chunkinParallel(jAr.getJSONObject(part), part + 1, part * sizeLimit));
    }

    executor.shutdown();
    // Block until all uploads complete instead of busy-spinning on isTerminated(),
    // which pegs a CPU core for the entire upload.
    if (!executor.awaitTermination(1, TimeUnit.HOURS)) {
      executor.shutdownNow();
      throw new IllegalStateException("Chunk uploads did not complete within the allotted time");
    }
    System.out.println("Finished all threads FOR MULTI PART UPLOAD");
    System.out.println("*****************************************************************");
    System.out.println("Commiting Streaming Upload ");
    HttpResponse httpResponse = EOSUtils.executePostService(commitURL, "", null);

    System.out.println(httpResponse.toString());
    System.out.println("************************COMPLETE UPLOAD OF LARGE FILE*****************************************");
  }

  /**
   * Wraps a single chunk upload as a Runnable for the executor.
   *
   * @param obj        chunk descriptor ("uploadUrl", "size")
   * @param i          1-based part number, used for logging
   * @param chunkStart byte offset of this chunk within the source file
   * @return a task that uploads the chunk and logs (but does not propagate) failures
   */
  static Runnable chunkinParallel(JSONObject obj, int i, int chunkStart) {
    return () -> {
      System.out.println("Inside : Thread for partNumber " + i + "  , " + Thread.currentThread().getName());
      try {
        uploadChunk(obj, i, chunkStart);
      } catch (Exception e) {
        // Best-effort: a failed chunk is logged; the commit step will surface
        // the incomplete upload server-side.
        e.printStackTrace();
      }
    };
  }

  /**
   * Uploads one chunk of {@link #byteInputParam} to its pre-assigned URL,
   * retrying up to two more times (with a 2s pause) on a non-2xx response.
   *
   * @param obj        chunk descriptor ("uploadUrl", "size")
   * @param i          1-based part number, used for logging
   * @param chunkStart byte offset of this chunk within the source file
   * @throws Exception if the HTTP call fails or the sleep is interrupted
   */
  public static void uploadChunk(JSONObject obj, int i, int chunkStart) throws Exception {
    String uploadUrl = obj.getString("uploadUrl");
    int size = obj.getInt("size");
    // Clamp to the file length: copyOfRange would otherwise zero-pad the tail
    // if the declared size overruns the actual data.
    int chunkEnd = Math.min(chunkStart + size, byteInputParam.length);
    System.out.println("partNumber:" + i + ", chunkStart:" + chunkStart + ", chunkEnd:" + chunkEnd + ", uploadUrl:" + uploadUrl);
    byte[] chunkedByte = Arrays.copyOfRange(byteInputParam, chunkStart, chunkEnd);
    int counter = 1;
    HttpResponse httpResponse = EOSUtils.executePutService(uploadUrl, chunkedByte, null);
    // Any 2xx counts as success (some endpoints return 201/204 for a PUT).
    while (httpResponse.getStatusLine().getStatusCode() / 100 != 2 && counter <= 2) {
      Thread.sleep(2000);
      counter++;
      System.out.println("partNumber  " + i + ", Try " + counter + ", previous try failed with : " + httpResponse.getStatusLine().getStatusCode());
      System.out.println("partNumber  " + i + ", failed. " + ", HTTP Response=" +  httpResponse.toString());
      httpResponse = EOSUtils.executePutService(uploadUrl, chunkedByte, null);
    }
    System.out.println("********************PART UPLOAD DONE*********************************************");
    System.out.println("Complete partNumber  " + i + ", HTTP Response=" +  httpResponse.toString());
    System.out.println("*****************************************************************");
  }
}
API Training Shop Blog About
© 2019 GitHub, Inc. Help Support