Remove vendored netty classes, because we actually use Netty in some places.
calvertdw committed Dec 5, 2024
1 parent 1e8d365 commit de7f595
Showing 16 changed files with 214 additions and 1,844 deletions.
@@ -3,7 +3,6 @@
 import perception_msgs.msg.dds.PlanarRegionsListMessage;
 import map_sense.RawGPUPlanarRegionList;
 import org.apache.commons.lang3.mutable.MutableInt;
-import org.jboss.netty.buffer.ChannelBuffer;
 //import org.jboss.netty.buffer.LittleEndianHeapChannelBuffer;
 import org.ros.message.Duration;
 import org.ros.message.Time;
@@ -70,36 +70,36 @@ public void onNewMessage(sensor_msgs.CompressedImage ros1Image)
 syncedRobot.update();
 // if (syncedRobot.getDataReceptionTimerSnapshot().isRunning(3.0))
 {
-try
-{
-byte[] payload = ros1Image.getData().array();
-int offset = ros1Image.getData().arrayOffset();
-BufferedImage bufferedImage = ImageIO.read(new ByteArrayInputStream(payload, offset, payload.length - offset));
-
-YUVPicture picture = converter.fromBufferedImage(bufferedImage, YUVPicture.YUVSubsamplingType.YUV420);
-ByteBuffer buffer = encoder.encode(picture, 75);
-
-byte[] data = new byte[buffer.remaining()];
-buffer.get(data);
-
-FramePose3DReadOnly ousterPose = syncedRobot.getFramePoseReadOnly(HumanoidReferenceFrames::getOusterLidarFrame);
-ousterPose.get(transformToWorld);
-
-VideoPacket message = new VideoPacket();
-message.setTimestamp(System.nanoTime());
-message.getData().add(data);
-message.getPosition().set(ousterPose.getPosition());
-message.getOrientation().set(ousterPose.getOrientation());
-message.setVideoSource(VideoPacket.VIDEO_SOURCE_MULTISENSE_LEFT_EYE);
-message.getIntrinsicParameters().set(HumanoidMessageTools.toIntrinsicParametersMessage(depthCameraIntrinsics));
-
-ros2Helper.publish(PerceptionAPI.VIDEO, message);
-}
-catch (Exception e)
-{
-LogTools.error(e.getMessage());
-e.printStackTrace();
-}
+// try
+// {
+// byte[] payload = ros1Image.getData().array();
+// int offset = ros1Image.getData().arrayOffset();
+// BufferedImage bufferedImage = ImageIO.read(new ByteArrayInputStream(payload, offset, payload.length - offset));
+//
+// YUVPicture picture = converter.fromBufferedImage(bufferedImage, YUVPicture.YUVSubsamplingType.YUV420);
+// ByteBuffer buffer = encoder.encode(picture, 75);
+//
+// byte[] data = new byte[buffer.remaining()];
+// buffer.get(data);
+//
+// FramePose3DReadOnly ousterPose = syncedRobot.getFramePoseReadOnly(HumanoidReferenceFrames::getOusterLidarFrame);
+// ousterPose.get(transformToWorld);
+//
+// VideoPacket message = new VideoPacket();
+// message.setTimestamp(System.nanoTime());
+// message.getData().add(data);
+// message.getPosition().set(ousterPose.getPosition());
+// message.getOrientation().set(ousterPose.getOrientation());
+// message.setVideoSource(VideoPacket.VIDEO_SOURCE_MULTISENSE_LEFT_EYE);
+// message.getIntrinsicParameters().set(HumanoidMessageTools.toIntrinsicParametersMessage(depthCameraIntrinsics));
+//
+// ros2Helper.publish(PerceptionAPI.VIDEO, message);
+// }
+// catch (Exception e)
+// {
+// LogTools.error(e.getMessage());
+// e.printStackTrace();
+// }
 }
 });
 }
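The disabled block above pulled the JPEG payload out of the rosjava message by reaching into the org.jboss.netty ChannelBuffer's backing array. For reference, a minimal sketch of reading the payload through the ChannelBuffer's own accessors instead, assuming getData() still returns a Netty 3 ChannelBuffer; the helper class and method name are illustrative, not part of this commit:

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;

import javax.imageio.ImageIO;

import org.jboss.netty.buffer.ChannelBuffer;

public final class CompressedImagePayload
{
   // Sketch only: copy the readable bytes out of the ChannelBuffer rather than
   // assuming it is backed by an accessible array at a known offset.
   public static BufferedImage decode(sensor_msgs.CompressedImage ros1Image) throws IOException
   {
      ChannelBuffer data = ros1Image.getData();
      byte[] payload = new byte[data.readableBytes()];
      data.getBytes(data.readerIndex(), payload); // absolute read; does not move readerIndex
      return ImageIO.read(new ByteArrayInputStream(payload));
   }
}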
@@ -2,8 +2,6 @@

 import perception_msgs.msg.dds.LidarScanMessage;
 import controller_msgs.msg.dds.StereoVisionPointCloudMessage;
-import org.bytedeco.javacpp.BytePointer;
-import org.jboss.netty.buffer.ChannelBuffer;
 import sensor_msgs.Image;
 import sensor_msgs.PointCloud2;
 import us.ihmc.atlas.AtlasRobotModel;
@@ -94,12 +92,12 @@ public void onNewMessage(PointCloud2 pointCloud2)
 Point3D[] l515Points = null;
 if (latestL515PointCloud2 != null && zed2Image != null)
 {
-ChannelBuffer l515NettyImageData = latestL515PointCloud2.getData();
-ByteBuffer l515DataByteBuffer = l515NettyImageData.toByteBuffer();
-int arrayOffset = l515NettyImageData.arrayOffset();
-l515DataByteBuffer.position(arrayOffset);
-ByteBuffer offsetByteBuffer = l515DataByteBuffer.slice();
-BytePointer imageDataPointer = new BytePointer(offsetByteBuffer);
+// ChannelBuffer l515NettyImageData = latestL515PointCloud2.getData();
+// ByteBuffer l515DataByteBuffer = l515NettyImageData.toByteBuffer();
+// int arrayOffset = l515NettyImageData.arrayOffset();
+// l515DataByteBuffer.position(arrayOffset);
+// ByteBuffer offsetByteBuffer = l515DataByteBuffer.slice();
+// BytePointer imageDataPointer = new BytePointer(offsetByteBuffer);
 
 // l515 is 4 float 32s, X,Y,Z,RGB
 // Point4fVector l515Point3fVector = new Point4fVector(imageDataPointer);
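The commented-out block converted the ChannelBuffer to an NIO view, repositioned it by the array offset, and sliced it before handing it to JavaCPP. A minimal sketch of the same hand-off without the vendored buffer classes; the class and method names here are illustrative, and it leans on Netty 3's documented toByteBuffer() behavior (covering the readable bytes), which should be verified against rosjava's shared backing buffers:

import java.nio.ByteBuffer;

import org.bytedeco.javacpp.BytePointer;
import org.jboss.netty.buffer.ChannelBuffer;

import sensor_msgs.PointCloud2;

public final class PointCloudBuffers
{
   // Sketch only: hand the point cloud bytes to JavaCPP as the old code did,
   // via an NIO view of the ChannelBuffer.
   public static BytePointer wrapPointData(PointCloud2 pointCloud2)
   {
      ChannelBuffer nettyData = pointCloud2.getData();
      ByteBuffer readableView = nettyData.toByteBuffer(); // documented to cover the readable bytes
      return new BytePointer(readableView);
   }
}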
@@ -7,7 +7,6 @@
 import org.bytedeco.opencv.global.opencv_imgcodecs;
 import org.bytedeco.opencv.global.opencv_imgproc;
 import org.bytedeco.opencv.opencv_core.Mat;
-import org.jboss.netty.buffer.ChannelBuffer;
 import us.ihmc.atlas.AtlasRobotModel;
 import us.ihmc.atlas.AtlasRobotVersion;
 import us.ihmc.avatar.drcRobot.ROS2SyncedRobotModel;
@@ -95,18 +94,18 @@ public void onNewMessage(sensor_msgs.Image ros1Image)
 try
 {
 
-ChannelBuffer nettyImageData = ros1Image.getData();
-ByteBuffer dataByteBuffer = nettyImageData.toByteBuffer();
-int arrayOffset = nettyImageData.arrayOffset();
-dataByteBuffer.position(arrayOffset);
-ByteBuffer offsetByteBuffer = dataByteBuffer.slice();
+// ChannelBuffer nettyImageData = ros1Image.getData();
+// ByteBuffer dataByteBuffer = nettyImageData.toByteBuffer();
+// int arrayOffset = nettyImageData.arrayOffset();
+// dataByteBuffer.position(arrayOffset);
+// ByteBuffer offsetByteBuffer = dataByteBuffer.slice();
 // byte[] nettyBackingArray = nettyImageData.array();
 // int dataLength = nettyBackingArray.length - arrayOffset;
 // ByteBuffer offsetByteBuffer = ByteBuffer.wrap(nettyBackingArray, arrayOffset, dataLength);
 // ByteBuffer offsetByteBuffer = new ByteBuffer(0, 0, dataLength, dataLength, nettyBackingArray, arrayOffset);
 // System.out.println(dataByteBuffer.isDirect());
 
-BytePointer imageDataPointer = new BytePointer(offsetByteBuffer);
+BytePointer imageDataPointer = null;
 inputImageMat.data(imageDataPointer);
 
 opencv_imgproc.cvtColor(inputImageMat, decodedImageMat, opencv_imgproc.COLOR_BGR2YUV_I420);
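With imageDataPointer stubbed to null, inputImageMat.data(imageDataPointer) leaves the Mat without pixel data, so this decode path stays dead until it is reimplemented. The commented alternatives above wrapped the heap backing array directly; a minimal sketch of that variant, assuming the ChannelBuffer is array-backed (direct buffers throw from array()), with an illustrative class name:

import java.nio.ByteBuffer;

import org.jboss.netty.buffer.ChannelBuffer;

public final class ImageBuffers
{
   // Sketch only: wrap the heap backing array without the vendored buffer classes.
   public static ByteBuffer wrapImageData(sensor_msgs.Image ros1Image)
   {
      ChannelBuffer nettyImageData = ros1Image.getData();
      byte[] backingArray = nettyImageData.array(); // throws if there is no backing array
      int arrayOffset = nettyImageData.arrayOffset();
      return ByteBuffer.wrap(backingArray, arrayOffset, backingArray.length - arrayOffset);
   }
}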
@@ -74,38 +74,38 @@ private void waitThenAct(sensor_msgs.CompressedImage ros1Image)

 private void compute(sensor_msgs.CompressedImage ros1Image)
 {
-try
-{
-byte[] payload = ros1Image.getData().array();
-int offset = ros1Image.getData().arrayOffset();
-BufferedImage bufferedImage = ImageIO.read(new ByteArrayInputStream(payload, offset, payload.length - offset));
-
-YUVPicture picture = converter.fromBufferedImage(bufferedImage, YUVPicture.YUVSubsamplingType.YUV420);
-ByteBuffer buffer = encoder.encode(picture, 75);
-
-byte[] data = new byte[buffer.remaining()];
-buffer.get(data);
-
-VideoPacket message = new VideoPacket();
-message.setTimestamp(System.nanoTime());
-message.getData().add(data);
-
-publisher.publish(message);
-}
-catch (Exception e)
-{
-LogTools.error(e.getMessage());
-e.printStackTrace();
-}
+// try
+// {
+// byte[] payload = ros1Image.getData().array();
+// int offset = ros1Image.getData().arrayOffset();
+// BufferedImage bufferedImage = ImageIO.read(new ByteArrayInputStream(payload, offset, payload.length - offset));
+//
+// YUVPicture picture = converter.fromBufferedImage(bufferedImage, YUVPicture.YUVSubsamplingType.YUV420);
+// ByteBuffer buffer = encoder.encode(picture, 75);
+//
+// byte[] data = new byte[buffer.remaining()];
+// buffer.get(data);
+//
+// VideoPacket message = new VideoPacket();
+// message.setTimestamp(System.nanoTime());
+// message.getData().add(data);
+//
+// publisher.publish(message);
+// }
+// catch (Exception e)
+// {
+// LogTools.error(e.getMessage());
+// e.printStackTrace();
+// }
 }
 
 // Eventually it would be nice to use Compressed Image
-private void createROS2CompressedImage(sensor_msgs.CompressedImage ros1Image)
-{
-CompressedImage ros2Image = new CompressedImage();
-byte[] data = ros1Image.getData().array();
-int dataOffset = ros1Image.getData().arrayOffset();
-int length = data.length;
-ros2Image.getData().add(data, dataOffset, length - dataOffset);
-}
+// private void createROS2CompressedImage(sensor_msgs.CompressedImage ros1Image)
+// {
+// CompressedImage ros2Image = new CompressedImage();
+// byte[] data = ros1Image.getData().array();
+// int dataOffset = ros1Image.getData().arrayOffset();
+// int length = data.length;
+// ros2Image.getData().add(data, dataOffset, length - dataOffset);
+// }
 }
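The retired createROS2CompressedImage helper copied the JPEG payload across after subtracting the array offset. For reference, a minimal self-contained sketch of the same bridge that copies the readable bytes instead; it assumes the ROS 2 type is perception_msgs.msg.dds.CompressedImage and that its data field is an IDL byte sequence with add(byte[]), as used for VideoPacket above, with an illustrative class name:

import org.jboss.netty.buffer.ChannelBuffer;

import perception_msgs.msg.dds.CompressedImage;

public final class CompressedImageBridge
{
   // Sketch only: bridge a ROS 1 CompressedImage payload into the ROS 2 message.
   public static CompressedImage toROS2(sensor_msgs.CompressedImage ros1Image)
   {
      ChannelBuffer nettyData = ros1Image.getData();
      byte[] payload = new byte[nettyData.readableBytes()];
      nettyData.getBytes(nettyData.readerIndex(), payload); // copy readable bytes

      CompressedImage ros2Image = new CompressedImage(); // assumed ROS 2 IDL type
      ros2Image.getData().add(payload);
      return ros2Image;
   }
}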