Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Android - Sensor::getProfile added, sensor example using it #9352

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
import com.intel.realsense.librealsense.StreamType;
import com.intel.realsense.librealsense.Extension;

import com.intel.realsense.librealsense.VideoStreamProfile;
import com.intel.realsense.librealsense.FrameCallback;
import com.intel.realsense.librealsense.GLRsSurfaceView;
import com.intel.realsense.librealsense.RsContext;
Expand All @@ -49,7 +48,9 @@ public class MainActivity extends AppCompatActivity {
private RsContext mRsContext;

private Device mDevice;
DepthSensor depth_sensor = null;
DepthSensor mDepthSensor = null;
StreamProfile mDepthProfile = null;
StreamProfile mIrProfile = null;


@Override
Expand Down Expand Up @@ -105,9 +106,10 @@ protected void onResume() {
// Activity is leaving the foreground: stop streaming first, then release
// the device/context. Order matters — the sensor must be stopped before
// the device and RsContext it belongs to are closed.
protected void onPause() {
    super.onPause();
    stop();
    releaseContext();
}

private FrameCallback mDepthFrameHandler = new FrameCallback()
private FrameCallback mFrameHandler = new FrameCallback()
{
@Override
public void onFrame(final Frame f) {
Expand Down Expand Up @@ -146,6 +148,8 @@ private void init(){
if( num_devices> 0) {
mDevice = dl.createDevice(0);
showConnectLabel(false);
assignDepthSensor();
assignProfiles();
start();
}
}
Expand Down Expand Up @@ -174,89 +178,24 @@ public void onDeviceDetach() {
};

private void configAndStart() throws Exception {
List<Sensor> sensors = mDevice.querySensors();

for(Sensor s : sensors)
{
if (s.is(Extension.DEPTH_SENSOR)) {
depth_sensor = s.as(Extension.DEPTH_SENSOR);
}
}

boolean depth_profile_found = false;
boolean ir_profile_found = false;

if (depth_sensor != null) {
List<StreamProfile> stream_profiles = depth_sensor.getStreamProfiles();
StreamProfile depth_profile = stream_profiles.get(0);
StreamProfile ir_profile = stream_profiles.get(0);

for (StreamProfile stream_profile : stream_profiles) {
if (depth_profile_found && ir_profile_found)
break;
if (!depth_profile_found && stream_profile.getType().compareTo(StreamType.DEPTH) == 0) {

if (stream_profile.is(Extension.VIDEO_PROFILE)) {
VideoStreamProfile video_stream_profile = stream_profile.as(Extension.VIDEO_PROFILE);

// After using the "as" method we can use the new data type
// for additional operations:
StreamFormat sf = video_stream_profile.getFormat();
int index = stream_profile.getIndex();
StreamType st = stream_profile.getType();
int w = video_stream_profile.getWidth();
int h = video_stream_profile.getHeight();
int fps = video_stream_profile.getFrameRate();

if (w == 640 && h == 480 && fps == 30 && (sf.compareTo(StreamFormat.Z16) == 0)) {
Log.d(TAG, "depth stream: " + index + ":" + st.name() + ":" + sf.name() + ":" + w + "x" + h + "@" + fps + "HZ");

depth_profile = stream_profile;
depth_profile_found = true;
}
}
}
if (!ir_profile_found && stream_profile.getType().compareTo(StreamType.INFRARED) == 0 && stream_profile.getIndex() == 1 ) {

if (stream_profile.is(Extension.VIDEO_PROFILE)) {
VideoStreamProfile video_stream_profile = stream_profile.as(Extension.VIDEO_PROFILE);

// After using the "as" method we can use the new data type
// for additional operations:
StreamFormat sf = video_stream_profile.getFormat();
int index = stream_profile.getIndex();
StreamType st = stream_profile.getType();
int w = video_stream_profile.getWidth();
int h = video_stream_profile.getHeight();
int fps = video_stream_profile.getFrameRate();

if (w == 640 && h == 480 && fps == 30 && (sf.compareTo(StreamFormat.Y8) == 0)) {
Log.d(TAG, "ir stream: " + index + ":" + st.name() + ":" + sf.name() + ":" + w + "x" + h + "@" + fps + "HZ");

ir_profile = stream_profile;
ir_profile_found = true;
}
}
}
}

if (!depth_profile_found && !ir_profile_found) {
if (mDepthSensor != null) {
if (mDepthProfile == null && mIrProfile == null) {
Toast.makeText(this, "The requested profiles are not available in this device ", Toast.LENGTH_LONG).show();
}
else {
List<StreamProfile> requested_profiles = new ArrayList<StreamProfile>();
if (depth_profile_found)
requested_profiles.add(depth_profile);
if (mDepthProfile != null)
requested_profiles.add(mDepthProfile);
else
Toast.makeText(this, "The depth requested profile is not available in this device ", Toast.LENGTH_LONG).show();

if (ir_profile_found)
requested_profiles.add(ir_profile);
if (mIrProfile != null)
requested_profiles.add(mIrProfile);
else
Toast.makeText(this, "The infrared requested profile is not available in this device ", Toast.LENGTH_LONG).show();

depth_sensor.openSensor(requested_profiles);
depth_sensor.start(mDepthFrameHandler);
mDepthSensor.openSensor(requested_profiles);
mDepthSensor.start(mFrameHandler);
}
}
}
Expand Down Expand Up @@ -284,17 +223,9 @@ private synchronized void stop() {
Log.d(TAG, "try stop streaming");
mIsStreaming = false;

if (depth_sensor != null) depth_sensor.stop();

if (mColorizer != null) mColorizer.close();
if (depth_sensor != null) {depth_sensor.closeSensor();}

if (mDevice != null) mDevice.close();

if(mRsContext != null) {
mRsContext.removeDevicesChangedCallback();
mRsContext.close();
mRsContext = null;
if (mDepthSensor != null){
mDepthSensor.stop();
mDepthSensor.closeSensor();
}

mGLSurfaceView.clear();
Expand All @@ -303,4 +234,32 @@ private synchronized void stop() {
Log.e(TAG, "failed to stop streaming");
}
}

/**
 * Releases the device, the colorizer and the RealSense context that were
 * acquired during init(). Safe to call when any of them is already null;
 * mRsContext is nulled out so a later onResume() can recreate it.
 */
private void releaseContext() {
    if (mColorizer != null) {
        mColorizer.close();
    }
    if (mDevice != null) {
        mDevice.close();
    }
    if (mRsContext != null) {
        mRsContext.removeDevicesChangedCallback();
        mRsContext.close();
        mRsContext = null;
    }
}

/**
 * Finds the device's depth sensor and caches it in mDepthSensor.
 * Fixes two issues in the original: it dereferenced mDevice without a
 * null guard (the field is nullable — see the attach/detach callbacks),
 * and it kept iterating after a match, pointlessly re-assigning
 * mDepthSensor when a device exposes more than one depth sensor.
 * mDepthSensor is left untouched when no device or no depth sensor exists.
 */
private void assignDepthSensor() {
    if (mDevice == null)
        return; // no device attached yet

    for (Sensor s : mDevice.querySensors()) {
        if (s.is(Extension.DEPTH_SENSOR)) {
            mDepthSensor = s.as(Extension.DEPTH_SENSOR);
            break; // first depth sensor wins; no need to keep scanning
        }
    }
}
private void assignProfiles() {
if (mDepthSensor != null) {
mDepthProfile = mDepthSensor.findProfile(StreamType.DEPTH, -1, 640, 480, StreamFormat.Z16, 30);
mIrProfile = mDepthSensor.findProfile(StreamType.INFRARED, -1, 640, 480, StreamFormat.Y8, 30);
}
ev-mp marked this conversation as resolved.
Show resolved Hide resolved
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,36 @@ public void closeSensor(){
nClose(mHandle);
}

/**
 * Searches this sensor's stream profiles for a video profile matching the
 * requested type, stream index, resolution, format and frame rate.
 *
 * @param type   requested stream type (e.g. DEPTH, INFRARED)
 * @param index  requested stream index, or -1 to accept any index
 * @param width  requested frame width in pixels
 * @param height requested frame height in pixels
 * @param format requested stream format (e.g. Z16, Y8)
 * @param fps    requested frame rate
 * @return the first matching profile, or null when none matches
 */
public StreamProfile findProfile(StreamType type, int index, int width, int height, StreamFormat format, int fps) {
    for (StreamProfile candidate : getStreamProfiles()) {
        if (candidate.getType().compareTo(type) != 0)
            continue;
        if (!candidate.is(Extension.VIDEO_PROFILE))
            continue;

        // "as" exposes the video-specific accessors (resolution, format, fps).
        VideoStreamProfile video = candidate.as(Extension.VIDEO_PROFILE);

        boolean indexMatches = (index == -1 || candidate.getIndex() == index);
        boolean sizeMatches = (video.getWidth() == width && video.getHeight() == height);
        boolean formatMatches = (video.getFormat().compareTo(format) == 0);

        if (indexMatches && sizeMatches && formatMatches && video.getFrameRate() == fps)
            return candidate;
    }
    return null;
}

private static native long[] nGetStreamProfiles(long handle);
private static native void nRelease(long handle);
private static native boolean nIsSensorExtendableTo(long handle, int extension);
Expand Down