[JAVA] Code style check added (#1984)

Anna Likholat 2020-09-09 17:49:23 +03:00 committed by GitHub
parent 40fd1858a2
commit 53c03db307
31 changed files with 491 additions and 401 deletions

View File

@ -31,10 +31,37 @@ jobs:
if: failure()
run: |
ngraph/maint/apply-code-format.sh
git diff >code_style_diff.patch
git diff >ngraph_code_style_diff.patch
- uses: actions/upload-artifact@v2
if: failure()
with:
name: code_style_diff
path: code_style_diff.patch
name: ngraph_code_style_diff
path: ngraph_code_style_diff.patch
Java:
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
- uses: actions/setup-java@v1
with:
java-version: '11'
- name: Install dependencies
run: |
wget -nc https://github.com/google/google-java-format/releases/download/google-java-format-1.9/google-java-format-1.9-all-deps.jar
- name: Check code style
run: |
java -jar google-java-format-1.9-all-deps.jar --set-exit-if-changed -a -i $(find . -type f -name "*.java")
- name: Create code style diff
if: failure()
run: |
git diff >java_code_style_diff.patch
- uses: actions/upload-artifact@v2
if: failure()
with:
name: java_code_style_diff
path: java_code_style_diff.patch
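
For context, the new Java job above runs google-java-format 1.9 in AOSP mode (`-a`) over every `*.java` file and fails if the formatter would change anything; the rest of this commit is essentially that formatter's output. The snippet below is an illustration only (the class and enum names are made up, not part of the commit) of the style it produces: a space before `{`, 4-space AOSP block indentation, enum constants split one per line, and long signatures wrapped onto continuation lines, matching the edits further down in this diff.

```java
import java.util.Map;

/** Hypothetical example of the style `google-java-format -a` produces; not part of the commit. */
public class StyleSample {
    // Enum constants with arguments go one per line, as in StatusCode and WaitMode below.
    public enum Mode {
        SYNC(0),
        ASYNC(1);

        private final int value;

        Mode(int value) {
            this.value = value;
        }
    }

    private final Map<String, String> config;

    // A space before '{' and 4-space AOSP block indentation.
    public StyleSample(Map<String, String> config) {
        this.config = config;
    }

    // A signature past the 100-column limit is wrapped onto continuation lines,
    // much like IECore.LoadNetwork and ReadNetwork1 further down in this diff.
    public static String describeNetworkInput(
            String inputName, String precision, String layout, Map<String, String> extraInfo) {
        return inputName + " precision " + precision + ", layout " + layout + " " + extraInfo;
    }
}
```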

View File

@ -11,7 +11,7 @@ public class Blob extends IEWrapper {
}
public Blob(TensorDesc tensorDesc, byte[] data) {
super(BlobByte(tensorDesc.getNativeObjAddr(), data)) ;
super(BlobByte(tensorDesc.getNativeObjAddr(), data));
}
public Blob(TensorDesc tensorDesc, float[] data) {
@ -22,7 +22,7 @@ public class Blob extends IEWrapper {
super(BlobCArray(tensorDesc.nativeObj, cArray));
}
public TensorDesc getTensorDesc(){
public TensorDesc getTensorDesc() {
return new TensorDesc(GetTensorDesc(nativeObj));
}

View File

@ -8,11 +8,11 @@ public class CNNNetwork extends IEWrapper {
super(addr);
}
public String getName(){
public String getName() {
return getName(nativeObj);
}
public int getBatchSize(){
public int getBatchSize() {
return getBatchSize(nativeObj);
}
@ -20,7 +20,7 @@ public class CNNNetwork extends IEWrapper {
return GetOutputsInfo(nativeObj);
}
public Map<String, InputInfo> getInputsInfo(){
public Map<String, InputInfo> getInputsInfo() {
return GetInputsInfo(nativeObj);
}
@ -28,7 +28,7 @@ public class CNNNetwork extends IEWrapper {
reshape(nativeObj, inputShapes);
}
public Map<String, int[]> getInputShapes(){
public Map<String, int[]> getInputShapes() {
return getInputShapes(nativeObj);
}

View File

@ -1,6 +1,6 @@
package org.intel.openvino;
public class Data extends IEWrapper{
public class Data extends IEWrapper {
protected Data(long addr) {
super(addr);

View File

@ -25,8 +25,10 @@ public class IECore extends IEWrapper {
return new ExecutableNetwork(LoadNetwork(nativeObj, net.getNativeObjAddr(), device));
}
public ExecutableNetwork LoadNetwork(CNNNetwork net, final String device, final Map<String, String> config) {
return new ExecutableNetwork(LoadNetwork1(nativeObj, net.getNativeObjAddr(), device, config));
public ExecutableNetwork LoadNetwork(
CNNNetwork net, final String device, final Map<String, String> config) {
long network = LoadNetwork1(nativeObj, net.getNativeObjAddr(), device, config);
return new ExecutableNetwork(network);
}
public void RegisterPlugin(String pluginName, String deviceName) {
@ -64,11 +66,13 @@ public class IECore extends IEWrapper {
/*----------------------------------- native methods -----------------------------------*/
private static native long ReadNetwork(long core, final String modelFileName);
private static native long ReadNetwork1(long core, final String modelPath, final String weightPath);
private static native long ReadNetwork1(
long core, final String modelPath, final String weightPath);
private static native long LoadNetwork(long core, long net, final String device);
private static native long LoadNetwork1(long core, long net, final String device, final Map<String, String> config);
private static native long LoadNetwork1(
long core, long net, final String device, final Map<String, String> config);
private static native void RegisterPlugin(long core, String pluginName, String deviceName);
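
The LoadNetwork and ReadNetwork overloads reformatted above are the entry points the samples in this commit call. Below is a minimal usage sketch, assuming ReadNetwork(String) returns a CNNNetwork as the native declarations suggest, with a hypothetical model path; it is not part of the commit.

```java
import java.util.HashMap;
import java.util.Map;
import org.intel.openvino.*;

public class LoadNetworkSketch {
    public static void main(String[] args) {
        // Assumes the Inference Engine native library has already been loaded,
        // as done in the samples' and tests' setup code.
        IECore core = new IECore();

        // Hypothetical IR path; replace with a real model .xml file.
        CNNNetwork net = core.ReadNetwork("model.xml");

        // Per-device configuration map, using a key seen in the benchmark sample below.
        Map<String, String> config = new HashMap<>();
        config.put("CPU_THREADS_NUM", "1");

        // The wrapped overload from the hunk above: device name plus a config map.
        ExecutableNetwork execNet = core.LoadNetwork(net, "CPU", config);

        InferRequest request = execNet.CreateInferRequest();
        request.StartAsync();
        request.Wait(WaitMode.RESULT_READY);
    }
}
```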

View File

@ -3,7 +3,7 @@ package org.intel.openvino;
public class IEWrapper {
protected final long nativeObj;
protected IEWrapper(long addr){
protected IEWrapper(long addr) {
nativeObj = addr;
}

View File

@ -28,7 +28,7 @@ public class InferRequest extends IEWrapper {
return StatusCode.valueOf(Wait(nativeObj, waitMode.getValue()));
}
public void SetCompletionCallback(Runnable runnable){
public void SetCompletionCallback(Runnable runnable) {
SetCompletionCallback(nativeObj, runnable);
}

View File

@ -38,7 +38,13 @@ public class InferenceEngineProfileInfo {
public String layerType;
public int executionIndex;
public InferenceEngineProfileInfo(LayerStatus status, long realTimeUSec, long cpuUSec, String execType, String layerType, int executionIndex) {
public InferenceEngineProfileInfo(
LayerStatus status,
long realTimeUSec,
long cpuUSec,
String execType,
String layerType,
int executionIndex) {
this.status = status;
this.realTimeUSec = realTimeUSec;
this.cpuUSec = cpuUSec;

View File

@ -1,6 +1,6 @@
package org.intel.openvino;
public class InputInfo extends IEWrapper{
public class InputInfo extends IEWrapper {
public InputInfo(long addr) {
super(addr);
@ -14,7 +14,7 @@ public class InputInfo extends IEWrapper{
SetLayout(nativeObj, layout.getValue());
}
public Layout getLayout(){
public Layout getLayout() {
return Layout.valueOf(getLayout(nativeObj));
}
@ -22,11 +22,11 @@ public class InputInfo extends IEWrapper{
SetPrecision(nativeObj, precision.getValue());
}
public Precision getPrecision(){
public Precision getPrecision() {
return Precision.valueOf(getPrecision(nativeObj));
}
public TensorDesc getTensorDesc(){
public TensorDesc getTensorDesc() {
return new TensorDesc(GetTensorDesc(nativeObj));
}

View File

@ -1,6 +1,6 @@
package org.intel.openvino;
public class PreProcessInfo extends IEWrapper{
public class PreProcessInfo extends IEWrapper {
public PreProcessInfo(long addr) {
super(addr);

View File

@ -39,4 +39,3 @@ public enum Precision {
return map.get(value);
}
}

View File

@ -1,7 +1,9 @@
package org.intel.openvino;
public enum ResizeAlgorithm {
NO_RESIZE(0), RESIZE_BILINEAR(1), RESIZE_AREA(2);
NO_RESIZE(0),
RESIZE_BILINEAR(1),
RESIZE_AREA(2);
private int value;

View File

@ -1,12 +1,21 @@
package org.intel.openvino;
import java.util.Map;
import java.util.HashMap;
import java.util.Map;
public enum StatusCode {
OK(0), GENERAL_ERROR(-1), NOT_IMPLEMENTED(-2), NETWORK_NOT_LOADED(-3),
PARAMETER_MISMATCH(-4), NOT_FOUND(-5), OUT_OF_BOUNDS(-6), UNEXPECTED(-7),
REQUEST_BUSY(-8), RESULT_NOT_READY(-9), NOT_ALLOCATED(-10), INFER_NOT_STARTED(-11),
OK(0),
GENERAL_ERROR(-1),
NOT_IMPLEMENTED(-2),
NETWORK_NOT_LOADED(-3),
PARAMETER_MISMATCH(-4),
NOT_FOUND(-5),
OUT_OF_BOUNDS(-6),
UNEXPECTED(-7),
REQUEST_BUSY(-8),
RESULT_NOT_READY(-9),
NOT_ALLOCATED(-10),
INFER_NOT_STARTED(-11),
NETWORK_NOT_READ(-12);
private int value;

View File

@ -1,10 +1,8 @@
package org.intel.openvino;
import java.util.concurrent.BlockingDeque;
public class TensorDesc extends IEWrapper {
public TensorDesc(long addr){
public TensorDesc(long addr) {
super(addr);
}
@ -16,11 +14,11 @@ public class TensorDesc extends IEWrapper {
return GetDims(nativeObj);
}
public Layout getLayout(){
public Layout getLayout() {
return Layout.valueOf(getLayout(nativeObj));
}
public Precision getPrecision(){
public Precision getPrecision() {
return Precision.valueOf(getPrecision(nativeObj));
}

View File

@ -1,7 +1,8 @@
package org.intel.openvino;
public enum WaitMode {
RESULT_READY(-1), STATUS_ONLY(0);
RESULT_READY(-1),
STATUS_ONLY(0);
private int value;

View File

@ -1,5 +1,5 @@
import java.util.Map;
import java.util.HashMap;
import java.util.Map;
public class ArgumentParser {
private Map<String, String> input;
@ -25,8 +25,8 @@ public class ArgumentParser {
}
public void parseArgs(String[] args) {
try{
for(int i = 0; i < args.length; i++) {
try {
for (int i = 0; i < args.length; i++) {
String arg = args[i];
if (arg.equals("--help") | arg.equals("-h")) {
printHelp();
@ -40,7 +40,7 @@ public class ArgumentParser {
}
}
}
} catch(ArrayIndexOutOfBoundsException e) {
} catch (ArrayIndexOutOfBoundsException e) {
System.out.println("Error: Incorrect number of arguments");
System.exit(0);
}

View File

@ -61,10 +61,7 @@ https://download.01.org/opencv/2019/open_model_zoo/R1/models_bin/face-detection-
## Build and run
Build and run steps are similar to ```benchmark_app```, but you need to add OpenCV path.
### Build
Add an environment variable with OpenCV installation or build path:
Build and run steps are similar to ```benchmark_app```, but you need to add an environment variable with OpenCV installation or build path before building:
```bash
export OpenCV_DIR=/path/to/opencv/
```

View File

@ -1,14 +1,15 @@
import java.util.Map;
import org.intel.openvino.*;
import java.util.Map;
public class InferReqWrap {
public InferReqWrap(ExecutableNetwork net, int id, InferRequestsQueue irQueue) {
request = net.CreateInferRequest();
this.id = id;
this.irQueue = irQueue;
request.SetCompletionCallback(new Runnable() {
request.SetCompletionCallback(
new Runnable() {
@Override
public void run() {
@ -43,7 +44,7 @@ public class InferReqWrap {
}
double getExecutionTimeInMilliseconds() {
return (double)(endTime - startTime) * 1e-6;
return (double) (endTime - startTime) * 1e-6;
}
InferRequest request;

View File

@ -1,9 +1,9 @@
import org.intel.openvino.*;
import java.util.Vector;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.intel.openvino.*;
public class InferRequestsQueue {
public InferRequestsQueue(ExecutableNetwork net, int nireq) {
for (int id = 0; id < nireq; id++) {
@ -20,7 +20,7 @@ public class InferRequestsQueue {
}
double getDurationInMilliseconds() {
return (double)(endTime - startTime) * 1e-6;
return (double) (endTime - startTime) * 1e-6;
}
void putIdleRequest(int id, double latency) {
@ -47,7 +47,7 @@ public class InferRequestsQueue {
void waitAll() {
synchronized (foo) {
try {
while(idleIds.size() != requests.size()) {
while (idleIds.size() != requests.size()) {
foo.wait();
}
} catch (InterruptedException e) {

View File

@ -1,28 +1,26 @@
import java.util.Map;
import java.util.Vector;
import javax.management.RuntimeErrorException;
import java.util.Random;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.ArrayList;
import java.util.Arrays;
import org.intel.openvino.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.Vector;
public class Main {
static boolean adjustShapesBatch(Map<String, int[]> shapes, int batchSize, Map<String, InputInfo> inputInfo) {
static boolean adjustShapesBatch(
Map<String, int[]> shapes, int batchSize, Map<String, InputInfo> inputInfo) {
boolean updated = false;
for (Map.Entry<String, InputInfo> entry : inputInfo.entrySet()) {
Layout layout = entry.getValue().getTensorDesc().getLayout();
int batchIndex = -1;
if ((layout == Layout.NCHW) || (layout == Layout.NCDHW) ||
(layout == Layout.NHWC) || (layout == Layout.NDHWC) ||
(layout == Layout.NC)) {
if ((layout == Layout.NCHW)
|| (layout == Layout.NCDHW)
|| (layout == Layout.NHWC)
|| (layout == Layout.NDHWC)
|| (layout == Layout.NC)) {
batchIndex = 0;
} else if (layout == Layout.CN) {
batchIndex = 1;
@ -35,28 +33,42 @@ public class Main {
return updated;
}
static String setThroughputStreams(IECore core, Map<String, String> device_config, String device, int nstreams, boolean isAsync) {
static String setThroughputStreams(
IECore core,
Map<String, String> device_config,
String device,
int nstreams,
boolean isAsync) {
String key = device + "_THROUGHPUT_STREAMS";
if (nstreams > 0) {
device_config.put(key, Integer.toString(nstreams));
} else if (!device_config.containsKey(key) && isAsync) {
System.err.println("[ WARNING ] -nstreams default value is determined automatically for " + device + " device. " +
"Although the automatic selection usually provides a reasonable performance," +
"but it still may be non-optimal for some cases, for more information look at README.");
System.err.println(
"[ WARNING ] -nstreams default value is determined automatically for "
+ device
+ " device. Although the automatic selection usually provides a"
+ " reasonable performance,but it still may be non-optimal for some"
+ " cases, for more information look at README.");
device_config.put(key, device + "_THROUGHPUT_AUTO");
}
return device_config.get(key);
};
}
static void fillBlobs(Vector<InferReqWrap> requests, Map<String, InputInfo> inputsInfo) {
for (Map.Entry<String, InputInfo> entry : inputsInfo.entrySet()) {
String inputName = entry.getKey();
TensorDesc tDesc = entry.getValue().getTensorDesc();
System.err.print("[ INFO ] Network input '" + inputName + "' precision " + tDesc.getPrecision()
+ ", dimensions (" + tDesc.getLayout() + "): ");
for (int dim : tDesc.getDims())
System.err.print(dim + " ");
System.err.print(
"[ INFO ] Network input '"
+ inputName
+ "' precision "
+ tDesc.getPrecision()
+ ", dimensions ("
+ tDesc.getLayout()
+ "): ");
for (int dim : tDesc.getDims()) System.err.print(dim + " ");
System.err.println();
}
@ -74,7 +86,7 @@ public class Main {
int dims[] = tDesc.getDims();
int size = 1;
for(int i = 0; i < dims.length; i++) {
for (int i = 0; i < dims.length; i++) {
size *= dims[i];
}
@ -92,48 +104,54 @@ public class Main {
Arrays.sort(arr);
if (arr.length % 2 == 0)
return ((double)arr[arr.length / 2] + (double)arr[arr.length / 2 - 1]) / 2;
else
return (double)arr[arr.length / 2];
return ((double) arr[arr.length / 2] + (double) arr[arr.length / 2 - 1]) / 2;
else return (double) arr[arr.length / 2];
}
static boolean getApiBoolean(String api) throws RuntimeException {
if(api.equals("sync"))
return false;
else if(api.equals("async"))
return true;
if (api.equals("sync")) return false;
else if (api.equals("async")) return true;
else throw new RuntimeException("Incorrect argument: '-api'");
}
static int step = 0;
static void nextStep(String stepInfo) {
step += 1;
System.out.println("[Step " + step + "/11] " + stepInfo);
}
static int deviceDefaultDeviceDurationInSeconds(String device) {
final Map<String, Integer> deviceDefaultDurationInSeconds = new HashMap<String, Integer>() {{
put("CPU", 60 );
put("GPU", 60 );
put("VPU", 60 );
put("MYRIAD", 60 );
put("HDDL", 60 );
final Map<String, Integer> deviceDefaultDurationInSeconds =
new HashMap<String, Integer>() {
{
put("CPU", 60);
put("GPU", 60);
put("VPU", 60);
put("MYRIAD", 60);
put("HDDL", 60);
put("FPGA", 120);
put("UNKNOWN", 120);
}};
}
};
Integer duration = deviceDefaultDurationInSeconds.get(device);
if (duration == null) {
duration = deviceDefaultDurationInSeconds.get("UNKNOWN");
System.err.println("[ WARNING ] Default duration " + duration + " seconds for unknown device '" + device + "' is used");
System.err.println(
"[ WARNING ] Default duration "
+ duration
+ " seconds for unknown device '"
+ device
+ "' is used");
}
return duration;
}
static long getTotalMsTime(long startTimeMilliSec) {
return (System.currentTimeMillis() - startTimeMilliSec);
};
}
static long getDurationInMilliseconds(int seconds) {
return seconds * 1000L;
@ -147,7 +165,7 @@ public class Main {
System.exit(1);
}
// ----------------- 1. Parsing and validating input arguments ---------------------------------------------
// ----------------- 1. Parsing and validating input arguments -----------------
nextStep("Parsing and validating input arguments");
ArgumentParser parser = new ArgumentParser("This is benchmarking application");
@ -169,36 +187,35 @@ public class Main {
int batchSize = parser.getInteger("-b", 0);
int nthreads = parser.getInteger("-nthreads", 0);
int nstreams = parser.getInteger("-nstreams", 0);
int timeLimit = parser.getInteger("-t",0);
int timeLimit = parser.getInteger("-t", 0);
String api = parser.get("-api", "async");
boolean isAsync;
try{
try {
isAsync = getApiBoolean(api);
} catch(RuntimeException e) {
} catch (RuntimeException e) {
System.out.println(e.getMessage());
return;
}
if(xmlPath == null) {
if (xmlPath == null) {
System.out.println("Error: Missed argument: -m");
return;
}
// ----------------- 2. Loading the Inference Engine --------------------------------------------------------
// ----------------- 2. Loading the Inference Engine --------------------------
nextStep("Loading the Inference Engine");
IECore core = new IECore();
// ----------------- 3. Setting device configuration --------------------------------------------------------
// ----------------- 3. Setting device configuration --------------------------
nextStep("Setting device configuration");
Map<String, String> device_config = new HashMap<>();
if (device.equals("CPU")) { // CPU supports few special performance-oriented keys
// limit threading for CPU portion of inference
if (nthreads > 0)
device_config.put("CPU_THREADS_NUM", Integer.toString(nthreads));
if (nthreads > 0) device_config.put("CPU_THREADS_NUM", Integer.toString(nthreads));
if (!device_config.containsKey("CPU_BIND_THREAD")) {
device_config.put("CPU_BIND_THREAD", "YES");
@ -214,13 +231,12 @@ public class Main {
} else if (device.equals("GNA")) {
device_config.put("GNA_PRECISION", "I16");
if (nthreads > 0)
device_config.put("GNA_LIB_N_THREADS", Integer.toString(nthreads));
if (nthreads > 0) device_config.put("GNA_LIB_N_THREADS", Integer.toString(nthreads));
}
core.SetConfig(device_config, device);
// ----------------- 4. Reading the Intermediate Representation network -------------------------------------
// ----------- 4. Reading the Intermediate Representation network -------------
nextStep("Reading the Intermediate Representation network");
long startTime = System.currentTimeMillis();
@ -233,7 +249,7 @@ public class Main {
String inputName = new ArrayList<String>(inputsInfo.keySet()).get(0);
InputInfo inputInfo = inputsInfo.get(inputName);
// ----------------- 5. Resizing network to match image sizes and given batch -------------------------------
// ----- 5. Resizing network to match image sizes and given batch --------------
nextStep("Resizing network to match image sizes and given batch");
int inputBatchSize = batchSize;
@ -252,15 +268,19 @@ public class Main {
System.err.println("[ INFO ] Reshape network took " + durationMs + " ms");
}
System.err.println((inputBatchSize != 0 ? "[ INFO ] Network batch size was changed to: " : "[ INFO ] Network batch size: ") + batchSize);
System.err.println(
(inputBatchSize != 0
? "[ INFO ] Network batch size was changed to: "
: "[ INFO ] Network batch size: ")
+ batchSize);
// ----------------- 6. Configuring input -------------------------------------------------------------------
// ----------------- 6. Configuring input -------------------------------------
nextStep("Configuring input");
inputInfo.getPreProcess().setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR);
inputInfo.setPrecision(Precision.U8);
// ----------------- 7. Loading the model to the device -----------------------------------------------------
// ----------------- 7. Loading the model to the device -----------------------
nextStep("Loading the model to the device");
startTime = System.currentTimeMillis();
@ -269,11 +289,12 @@ public class Main {
System.err.println("[ INFO ] Load network took " + durationMs + " ms");
// ----------------- 8. Setting optimal runtime parameters --------------------------------------------------
// ---------------- 8. Setting optimal runtime parameters ---------------------
nextStep("Setting optimal runtime parameters");
// Update number of streams
nstreams = Integer.parseInt(core.GetConfig(device, device + "_THROUGHPUT_STREAMS").asString());
String nStr = core.GetConfig(device, device + "_THROUGHPUT_STREAMS").asString();
nstreams = Integer.parseInt(nStr);
// Number of requests
if (nireq == 0) {
@ -289,8 +310,12 @@ public class Main {
int temp = niter;
niter = ((niter + nireq - 1) / nireq) * nireq;
if (temp != niter) {
System.err.println("[ INFO ] Number of iterations was aligned by request number from " +
temp + " to " + niter + " using number of requests " + nireq);
System.err.println(
"[ INFO ] Number of iterations was aligned by request number from "
+ temp
+ " to "
+ niter
+ " using number of requests "
+ nireq);
}
}
@ -305,13 +330,13 @@ public class Main {
}
durationMs = getDurationInMilliseconds(durationSeconds);
// ----------------- 9. Creating infer requests and filling input blobs -------------------------------------
// ---------- 9. Creating infer requests and filling input blobs ---------------
nextStep("Creating infer requests and filling input blobs");
InferRequestsQueue inferRequestsQueue = new InferRequestsQueue(executableNetwork, nireq);
fillBlobs(inferRequestsQueue.requests, inputsInfo);
// ----------------- 10. Measuring performance --------------------------------------------------------------
// ---------- 10. Measuring performance ----------------------------------------
String ss = "Start inference " + api + "ronously";
if (isAsync) {
if (!ss.isEmpty()) {
@ -353,17 +378,18 @@ public class Main {
startTime = System.currentTimeMillis();
long execTime = getTotalMsTime(startTime);
while ((niter != 0 && iteration < niter) ||
(durationMs != 0L && execTime < durationMs) ||
(isAsync && iteration % nireq != 0)) {
while ((niter != 0 && iteration < niter)
|| (durationMs != 0L && execTime < durationMs)
|| (isAsync && iteration % nireq != 0)) {
inferRequest = inferRequestsQueue.getIdleRequest();
if (isAsync) {
// As the inference request is currently idle, the wait() adds no additional overhead
//(and should return immediately).
// As the inference request is currently idle, the wait() adds no additional
// overhead (and should return immediately).
// The primary reason for calling the method is exception checking/re-throwing.
// Callback, that governs the actual execution can handle errors as well,
// but as it uses just error codes it has no details like what() method of `std::exception`
// but as it uses just error codes it has no details like what() method of
// `std::exception`.
// So, rechecking for any exceptions here.
inferRequest._wait();
inferRequest.startAsync();
@ -380,10 +406,12 @@ public class Main {
double latency = getMedianValue(inferRequestsQueue.getLatencies());
double totalDuration = inferRequestsQueue.getDurationInMilliseconds();
double fps = (!isAsync) ? batchSize * 1000.0 / latency :
batchSize * 1000.0 * iteration / totalDuration;
double fps =
(!isAsync)
? batchSize * 1000.0 / latency
: batchSize * 1000.0 * iteration / totalDuration;
// ----------------- 11. Dumping statistics report ----------------------------------------------------------
// ------------ 11. Dumping statistics report ----------------------------------
nextStep("Dumping statistics report");
System.out.println("Count: " + iteration + " iterations");

View File

@ -1,12 +1,11 @@
import org.opencv.core.*;
import org.opencv.imgcodecs.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.intel.openvino.*;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.*;
import org.opencv.imgproc.Imgproc;
import java.util.Map;
import java.util.Set;
import java.util.ArrayList;
import java.util.Map;
/*
This is face detection java sample.
@ -42,11 +41,11 @@ public class Main {
String imgPath = parser.get("-i", null);
String xmlPath = parser.get("-m", null);
if(imgPath == null) {
if (imgPath == null) {
System.out.println("Error: Missed argument: -i");
return;
}
if(xmlPath == null) {
if (xmlPath == null) {
System.out.println("Error: Missed argument: -m");
return;
}
@ -57,7 +56,8 @@ public class Main {
TensorDesc tDesc = new TensorDesc(Precision.U8, dimsArr, Layout.NHWC);
// The source image is also used at the end of the program to display the detection results,
// therefore the Mat object won't be destroyed by Garbage Collector while the network is running.
// therefore the Mat object won't be destroyed by Garbage Collector while the network is
// running.
Blob imgBlob = new Blob(tDesc, image.dataAddr());
IECore core = new IECore();
@ -89,14 +89,12 @@ public class Main {
for (int curProposal = 0; curProposal < maxProposalCount; curProposal++) {
int image_id = (int) detection[curProposal * 7];
if (image_id < 0)
break;
if (image_id < 0) break;
float confidence = detection[curProposal * 7 + 2];
// Drawing only objects with >70% probability
if (confidence < THRESHOLD)
continue;
if (confidence < THRESHOLD) continue;
int label = (int) (detection[curProposal * 7 + 1]);
int xmin = (int) (detection[curProposal * 7 + 3] * image.cols());
@ -104,12 +102,15 @@ public class Main {
int xmax = (int) (detection[curProposal * 7 + 5] * image.cols());
int ymax = (int) (detection[curProposal * 7 + 6] * image.rows());
System.out.println("[" + curProposal + "," + label + "] element, prob = " + confidence + " (" + xmin
+ "," + ymin + ")-(" + xmax + "," + ymax + ")");
String result = "[" + curProposal + "," + label + "] element, prob = " + confidence;
result += " (" + xmin + "," + ymin + ")-(" + xmax + "," + ymax + ")";
System.out.println(result);
System.out.println(" - WILL BE PRINTED!");
// Draw rectangle around detected object.
Imgproc.rectangle(image, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0));
Imgproc.rectangle(
image, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0));
}
HighGui.namedWindow("Detection", HighGui.WINDOW_AUTOSIZE);
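
The loop above reads the output blob in groups of seven floats per proposal. Below is a small sketch of that layout as a hypothetical helper class (not part of the sample), following the SSD DetectionOutput convention the sample assumes:

```java
/** Illustration only: the per-proposal layout the detection loop above assumes. */
public final class Detection {
    public final int imageId;      // detection[i * 7 + 0]; a negative value marks the end of valid results
    public final int label;        // detection[i * 7 + 1]
    public final float confidence; // detection[i * 7 + 2]; the sample keeps only values above 0.7
    public final float xmin, ymin, xmax, ymax; // detection[i * 7 + 3..6], normalized to [0, 1]

    public Detection(float[] detection, int i) {
        imageId = (int) detection[i * 7];
        label = (int) detection[i * 7 + 1];
        confidence = detection[i * 7 + 2];
        xmin = detection[i * 7 + 3];
        ymin = detection[i * 7 + 4];
        xmax = detection[i * 7 + 5];
        ymax = detection[i * 7 + 6];
    }
}
```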

View File

@ -1,20 +1,18 @@
import org.intel.openvino.*;
import org.opencv.core.*;
import org.opencv.imgcodecs.*;
import org.opencv.videoio.*;
import org.opencv.imgproc.Imgproc;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.*;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.*;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.Vector;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.Map;
import java.util.Queue;
import java.util.ArrayList;
import java.util.HashMap;
import org.intel.openvino.*;
/*
This is async face detection java sample.
@ -42,8 +40,7 @@ public class Main {
int requestId = startedRequestsIds.peek();
InferRequest inferRequest = inferRequests.get(requestId);
if (inferRequest.Wait(wait) != StatusCode.OK)
return;
if (inferRequest.Wait(wait) != StatusCode.OK) return;
if (size == 0 && res == null) {
size = inferRequest.GetBlob(outputName).size();
@ -86,11 +83,11 @@ public class Main {
String device = parser.get("-d", "CPU");
int inferRequestsSize = parser.getInteger("-nireq", 2);
if(imgsPath == null ) {
if (imgsPath == null) {
System.out.println("Error: Missed argument: -i");
return;
}
if(xmlPath == null) {
if (xmlPath == null) {
System.out.println("Error: Missed argument: -m");
return;
}
@ -99,7 +96,8 @@ public class Main {
BlockingQueue<Mat> framesQueue = new LinkedBlockingQueue<Mat>();
Thread captureThread = new Thread(new Runnable() {
Runnable capture =
new Runnable() {
@Override
public void run() {
VideoCapture cam = new VideoCapture();
@ -111,10 +109,11 @@ public class Main {
framesQueue.add(frame.clone());
}
}
});
Thread inferThread = new Thread(new Runnable() {
};
Thread captureThread = new Thread(capture);
Runnable infer =
new Runnable() {
@Override
public void run() {
try {
@ -125,18 +124,21 @@ public class Main {
String inputName = new ArrayList<String>(inputsInfo.keySet()).get(0);
InputInfo inputInfo = inputsInfo.get(inputName);
inputInfo.getPreProcess().setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR);
inputInfo
.getPreProcess()
.setResizeAlgorithm(ResizeAlgorithm.RESIZE_BILINEAR);
inputInfo.setLayout(Layout.NHWC);
inputInfo.setPrecision(Precision.U8);
outputName = new ArrayList<String>(net.getOutputsInfo().keySet()).get(0);
outputName =
new ArrayList<String>(net.getOutputsInfo().keySet()).get(0);
ExecutableNetwork executableNetwork = core.LoadNetwork(net, device);
ExecutableNetwork execNetwork = core.LoadNetwork(net, device);
asyncInferIsFree = new Vector<Boolean>(inferRequestsSize);
for (int i = 0; i < inferRequestsSize; i++) {
inferRequests.add(executableNetwork.CreateInferRequest());
inferRequests.add(execNetwork.CreateInferRequest());
asyncInferIsFree.add(true);
}
@ -146,21 +148,22 @@ public class Main {
processInferRequets(WaitMode.STATUS_ONLY);
for (int i = 0; i < inferRequestsSize; i++) {
if (!asyncInferIsFree.get(i))
continue;
if (!asyncInferIsFree.get(i)) continue;
Mat frame = framesQueue.poll(0, TimeUnit.SECONDS);
if (frame == null)
break;
if (frame == null) break;
InferRequest request = inferRequests.get(i);
asyncInferIsFree.setElementAt(false, i);
processedFramesQueue.add(frame); // predictionsQueue is used in rendering
// processedFramesQueue is used in rendering
processedFramesQueue.add(frame);
// The source frame is kept in processedFramesQueue,
// so the frame will be removed by java Garbage Collector only after completion of inference,
// so the frame will be removed by java Garbage
// Collector only after completion of inference,
// and we can create Blob object using Mat object data address.
Blob imgBlob = imageToBlob(frame);
request.SetBlob(inputName, imgBlob);
@ -174,36 +177,34 @@ public class Main {
e.printStackTrace();
for (Thread t : Thread.getAllStackTraces().keySet())
if (t.getState()==Thread.State.RUNNABLE)
t.interrupt();
if (t.getState() == Thread.State.RUNNABLE) t.interrupt();
}
}
});
};
Thread inferThread = new Thread(infer);
captureThread.start();
inferThread.start();
TickMeter tm = new TickMeter();
Scalar color = new Scalar(0, 255, 0);
try {
while (inferThread.isAlive() || !detectionOutput.isEmpty()) {
float[] detection = detectionOutput.poll(waitingTime, TimeUnit.SECONDS);
if (detection == null)
continue;
if (detection == null) continue;
Mat img = processedFramesQueue.poll(waitingTime, TimeUnit.SECONDS);
int maxProposalCount = detection.length / 7;
for (int curProposal = 0; curProposal < maxProposalCount; curProposal++) {
int imageId = (int) detection[curProposal * 7];
if (imageId < 0)
break;
if (imageId < 0) break;
float confidence = detection[curProposal * 7 + 2];
// Drawing only objects with >70% probability
if (confidence < CONFIDENCE_THRESHOLD)
continue;
if (confidence < CONFIDENCE_THRESHOLD) continue;
int label = (int) (detection[curProposal * 7 + 1]);
int xmin = (int) (detection[curProposal * 7 + 3] * img.cols());
@ -212,21 +213,25 @@ public class Main {
int ymax = (int) (detection[curProposal * 7 + 6] * img.rows());
// Draw rectangle around detected object.
Imgproc.rectangle(img, new Point(xmin, ymin), new Point(xmax, ymax), new Scalar(0, 255, 0), 2);
Point lt = new Point(xmin, ymin);
Point br = new Point(xmax, ymax);
Imgproc.rectangle(img, lt, br, color, 2);
}
if (resultCounter == warmupNum) {
tm.start();
} else if (resultCounter > warmupNum) {
tm.stop();
double worksFps = ((double)(resultCounter - warmupNum)) / tm.getTimeSec();
double readFps = ((double)(framesCounter - warmupNum)) / tm.getTimeSec();
double worksFps = ((double) (resultCounter - warmupNum)) / tm.getTimeSec();
double readFps = ((double) (framesCounter - warmupNum)) / tm.getTimeSec();
tm.start();
Imgproc.putText(img, "Reading fps: " + String.format("%.3f", readFps), new Point(10, 50), 0 , 0.7, new Scalar(0, 255, 0), 1);
Imgproc.putText(img, "Inference fps: " + String.format("%.3f", worksFps), new Point(10, 80), 0 , 0.7, new Scalar(0, 255, 0), 1);
}
String label = "Reading fps: " + String.format("%.3f", readFps);
String label1 = "Inference fps: " + String.format("%.3f", worksFps);
Imgproc.putText(img, label, new Point(10, 50), 0, 0.7, color, 1);
Imgproc.putText(img, label1, new Point(10, 80), 0, 0.7, color, 1);
}
HighGui.imshow("Detection", img);
}
@ -239,8 +244,7 @@ public class Main {
} catch (InterruptedException e) {
e.printStackTrace();
for (Thread t : Thread.getAllStackTraces().keySet())
if (t.getState()==Thread.State.RUNNABLE)
t.interrupt();
if (t.getState() == Thread.State.RUNNABLE) t.interrupt();
}
}

View File

@ -1,8 +1,7 @@
import org.intel.openvino.*;
import org.junit.Assert;
import org.junit.Test;
import org.intel.openvino.*;
public class BlobTests extends IETest {
@Test

View File

@ -1,12 +1,12 @@
import static org.junit.Assert.*;
import org.intel.openvino.*;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.intel.openvino.*;
public class CNNNetworkTests extends IETest {
IECore core = new IECore();
@ -45,5 +45,4 @@ public class CNNNetworkTests extends IETest {
assertEquals("Input size", 2, output.size());
}
}

View File

@ -1,10 +1,10 @@
import static org.junit.Assert.*;
import org.junit.Test;
import org.intel.openvino.*;
import org.junit.Test;
import java.util.Map;
import java.util.HashMap;
import java.util.Map;
public class IECoreTests extends IETest {
IECore core = new IECore();
@ -57,7 +57,7 @@ public class IECoreTests extends IETest {
Map<String, String> testMap = new HashMap<String, String>();
//When specifying key values as raw strings, omit the KEY_ prefix
// When specifying key values as raw strings, omit the KEY_ prefix
testMap.put("CPU_BIND_THREAD", "YES");
testMap.put("CPU_THREADS_NUM", "1");
@ -75,6 +75,8 @@ public class IECoreTests extends IETest {
} catch (Exception e) {
exceptionMessage = e.getMessage();
}
assertTrue(exceptionMessage.contains("Device with \"DEVISE\" name is not registered in the InferenceEngine"));
assertTrue(
exceptionMessage.contains(
"Device with \"DEVISE\" name is not registered in the InferenceEngine"));
}
}

View File

@ -1,15 +1,11 @@
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.intel.openvino.*;
import org.junit.Ignore;
import org.junit.runner.Description;
import org.junit.Rule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import java.nio.file.Paths;
import org.intel.openvino.*;
@Ignore
public class IETest {
String modelXml;
@ -23,12 +19,25 @@ public class IETest {
System.err.println("Failed to load Inference Engine library\n" + e);
System.exit(1);
}
modelXml = Paths.get(System.getenv("MODELS_PATH"), "models", "test_model", "test_model_fp32.xml").toString();
modelBin = Paths.get(System.getenv("MODELS_PATH"), "models", "test_model", "test_model_fp32.bin").toString();
modelXml =
Paths.get(
System.getenv("MODELS_PATH"),
"models",
"test_model",
"test_model_fp32.xml")
.toString();
modelBin =
Paths.get(
System.getenv("MODELS_PATH"),
"models",
"test_model",
"test_model_fp32.bin")
.toString();
}
@Rule
public TestWatcher watchman = new TestWatcher() {
public TestWatcher watchman =
new TestWatcher() {
@Override
protected void succeeded(Description description) {
System.out.println(description + " - OK");

View File

@ -1,13 +1,12 @@
import static org.junit.Assert.*;
import org.junit.Test;
import org.junit.Before;
import java.util.Map;
import java.util.Vector;
import java.util.ArrayList;
import org.intel.openvino.*;
import org.intel.openvino.InferenceEngineProfileInfo.LayerStatus;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Map;
import java.util.Vector;
public class InferRequestTests extends IETest {
IECore core;
@ -64,7 +63,8 @@ public class InferRequestTests extends IETest {
assertEquals(key + " execType", key, layer_name.elementAt(i));
assertEquals(key + " executionIndex", i, resVal.executionIndex);
assertTrue(resVal.status == InferenceEngineProfileInfo.LayerStatus.EXECUTED
assertTrue(
resVal.status == InferenceEngineProfileInfo.LayerStatus.EXECUTED
|| resVal.status == InferenceEngineProfileInfo.LayerStatus.NOT_RUN);
}
}
@ -79,7 +79,8 @@ public class InferRequestTests extends IETest {
@Test
public void testSetCompletionCallback() {
inferRequest.SetCompletionCallback(new Runnable() {
inferRequest.SetCompletionCallback(
new Runnable() {
@Override
public void run() {
@ -87,7 +88,7 @@ public class InferRequestTests extends IETest {
}
});
for(int i = 0; i < 5; i++) {
for (int i = 0; i < 5; i++) {
inferRequest.Wait(WaitMode.RESULT_READY);
inferRequest.StartAsync();
}

View File

@ -1,11 +1,11 @@
import static org.junit.Assert.*;
import org.intel.openvino.*;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Map;
import org.intel.openvino.*;
public class InputInfoTests extends IETest {
IECore core = new IECore();
@ -33,5 +33,4 @@ public class InputInfoTests extends IETest {
assertEquals("setPrecision", Precision.U8, inputInfo.getPrecision());
}
}

View File

@ -1,3 +1,4 @@
import org.intel.openvino.*;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;

View File

@ -1,40 +1,38 @@
import org.junit.runner.RunWith;
import org.junit.runners.AllTests;
import junit.framework.TestSuite;
import java.util.List;
import java.util.ArrayList;
import java.util.zip.*;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.intel.openvino.*;
import org.junit.runner.RunWith;
import org.junit.runners.AllTests;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.Class;
import java.net.*;
import org.intel.openvino.*;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.*;
@RunWith(AllTests.class)
public class TestsSuite extends IETest{
public class TestsSuite extends IETest {
public static TestSuite suite() {
TestSuite suite = new TestSuite();
try {
//get openvino_test.jar path
String dir = new File(TestsSuite.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getPath().toString();
// get openvino_test.jar path
String dir =
new File(
TestsSuite.class
.getProtectionDomain()
.getCodeSource()
.getLocation()
.toURI())
.getPath()
.toString();
List<Class<?>> results = findClasses(dir);
for (Class<?> cl : results) {
if (cl.getName() == "ArgumentParser")
continue;
if (cl.getName() == "ArgumentParser") continue;
suite.addTest(new junit.framework.JUnit4TestAdapter(cl));
}
} catch (ClassNotFoundException e) {
@ -51,14 +49,19 @@ public class TestsSuite extends IETest{
ZipInputStream zip = new ZipInputStream(new FileInputStream(directory));
for (ZipEntry entry = zip.getNextEntry(); entry != null; entry = zip.getNextEntry()) {
String name = entry.getName().toString();
if (name.endsWith(".class") && !name.contains("$") && !name.contains("/")
&& !name.equals("TestsSuite.class") && !name.equals("OpenVinoTestRunner.class") && !name.equals("IETest.class")) {
classes.add(Class.forName(name.substring(0, name.length() - ".class".length())));
if (name.endsWith(".class")
&& !name.contains("$")
&& !name.contains("/")
&& !name.equals("TestsSuite.class")
&& !name.equals("OpenVinoTestRunner.class")
&& !name.equals("IETest.class")) {
classes.add(
Class.forName(name.substring(0, name.length() - ".class".length())));
}
}
} catch(FileNotFoundException e) {
} catch (FileNotFoundException e) {
System.out.println("FileNotFoundException: " + e.getMessage());
} catch(IOException e) {
} catch (IOException e) {
System.out.println("IOException: " + e.getMessage());
}
return classes;