From 659f03ac18a66e9e8ab6e79da1766d5f6133fd11 Mon Sep 17 00:00:00 2001
From: KalmanSLightech
Date: Sun, 8 Jul 2018 18:49:09 +0800
Subject: [PATCH] change the way of split images

---
 samples/device/camera.cc | 20 ++++++++++----------
 src/device/device.cc     |  4 ++--
 src/internal/config.cc   |  2 +-
 src/internal/streams.cc  | 32 +++++++++++++++++++-------------
 src/public/utils.cc      |  4 ++--
 5 files changed, 34 insertions(+), 28 deletions(-)

diff --git a/samples/device/camera.cc b/samples/device/camera.cc
index bcd9695..5815cbd 100644
--- a/samples/device/camera.cc
+++ b/samples/device/camera.cc
@@ -46,8 +46,7 @@ int main(int argc, char *argv[]) {
   device->SetOptionValue(Option::FRAME_RATE, 25);
   device->SetOptionValue(Option::IMU_FREQUENCY, 500);
   */
-  device->LogOptionInfos();
-
+  // device->LogOptionInfos();
   // device->RunOptionAction(Option::ZERO_DRIFT_CALIBRATION);
 
   std::size_t left_count = 0;
@@ -72,6 +71,7 @@ int main(int argc, char *argv[]) {
   });
 
   std::size_t imu_count = 0;
+/*
   device->SetMotionCallback([&imu_count](const device::MotionData &data) {
     CHECK_NOTNULL(data.imu);
     ++imu_count;
@@ -86,11 +86,10 @@ int main(int argc, char *argv[]) {
         << ", gyro_z: " << data.imu->gyro[2]
         << ", temperature: " << data.imu->temperature;
   });
-
+*/
   // Enable this will cache the motion datas until you get them.
-  device->EnableMotionDatas();
-  device->Start(Source::ALL);
-
+  // device->EnableMotionDatas();
+  device->Start(Source::VIDEO_STREAMING);
   cv::namedWindow("frame");
 
   std::size_t motion_count = 0;
@@ -100,7 +99,7 @@ int main(int argc, char *argv[]) {
 
     device::StreamData left_data = device->GetLatestStreamData(Stream::LEFT);
     device::StreamData right_data = device->GetLatestStreamData(Stream::RIGHT);
-
+/*
     auto &&motion_datas = device->GetMotionDatas();
     motion_count += motion_datas.size();
     for (auto &&data : motion_datas) {
@@ -114,16 +113,17 @@ int main(int argc, char *argv[]) {
          << ", gyro_z: " << data.imu->gyro[2]
          << ", temperature: " << data.imu->temperature;
     }
-
+*/
     cv::Mat left_img(
-        left_data.frame->height(), left_data.frame->width(), CV_8UC1,
+        left_data.frame->height(), left_data.frame->width(), CV_8UC2,
        left_data.frame->data());
     cv::Mat right_img(
-        right_data.frame->height(), right_data.frame->width(), CV_8UC1,
+        right_data.frame->height(), right_data.frame->width(), CV_8UC2,
        right_data.frame->data());
     cv::Mat img;
     cv::hconcat(left_img, right_img, img);
+    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
     cv::imshow("frame", img);
 
     char key = static_cast<char>(cv::waitKey(1));

diff --git a/src/device/device.cc b/src/device/device.cc
index 576f0ae..0b08461 100644
--- a/src/device/device.cc
+++ b/src/device/device.cc
@@ -87,7 +87,7 @@ Device::Device(const Model &model, std::shared_ptr<uvc::device> device)
       channels_(std::make_shared<Channels>(device)),
       motions_(std::make_shared<Motions>(channels_)) {
   VLOG(2) << __func__;
-  ReadAllInfos();
+  //ReadAllInfos();
 }
 
 Device::~Device() {
@@ -96,7 +96,7 @@ Device::~Device() {
 
 std::shared_ptr<Device> Device::Create(
     const std::string &name, std::shared_ptr<uvc::device> device) {
-  if (name == "MYNTEYE") {
+  if (name == "MYNTEYE" || name == "CX3-UVC") {
     return std::make_shared<StandardDevice>(device);
   } else if (strings::starts_with(name, "MYNT-EYE-")) {
     // TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000

diff --git a/src/internal/config.cc b/src/internal/config.cc
index f9d5a53..9b3188d 100644
--- a/src/internal/config.cc
+++ b/src/internal/config.cc
@@ -31,6 +31,6 @@ const std::map<Model, OptionSupports> option_supports_map = {
 
 const std::map<Model, std::map<Capabilities, StreamRequests>> stream_requests_map = {
     {Model::STANDARD,
-     {{Capabilities::STEREO, {{752, 480, Format::YUYV, 25}}}}}};
+     {{Capabilities::STEREO, {{1280, 480, Format::YUYV, 25}}}}}};
 
 MYNTEYE_END_NAMESPACE

diff --git a/src/internal/streams.cc b/src/internal/streams.cc
index 4b049e3..d97a656 100644
--- a/src/internal/streams.cc
+++ b/src/internal/streams.cc
@@ -48,7 +48,7 @@ bool unpack_stereo_img_data(
   // << ", timestamp="<< std::dec << img_packet.timestamp
   // << ", exposure_time="<< std::dec << img_packet.exposure_time
   // << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
-
+/*
   if (img_packet.header != 0x3B) {
     LOG(WARNING) << "Image packet header must be 0x3B, but 0x" << std::hex
                  << std::uppercase << std::setw(2) << std::setfill('0')
@@ -68,7 +68,7 @@ bool unpack_stereo_img_data(
                  << static_cast<int>(checksum) << " now";
     return false;
   }
-
+*/
   img->frame_id = img_packet.frame_id;
   img->timestamp = img_packet.timestamp;
   img->exposure_time = img_packet.exposure_time;
@@ -79,11 +79,14 @@ bool unpack_left_img_pixels(
     const void *data, const StreamRequest &request, Streams::frame_t *frame) {
   CHECK_NOTNULL(frame);
   CHECK_EQ(request.format, Format::YUYV);
-  CHECK_EQ(frame->format(), Format::GREY);
+  CHECK_EQ(frame->format(), Format::YUYV);
   auto data_new = reinterpret_cast<const std::uint8_t *>(data);
-  std::size_t n = frame->width() * frame->height();
-  for (std::size_t i = 0; i < n; i++) {
-    frame->data()[i] = *(data_new + (i * 2));
+  std::size_t w = frame->width() * 2;
+  std::size_t h = frame->height();
+  for(std::size_t i = 0; i < h; i++) {
+    for(std::size_t j = 0; j < w; j++) {
+      frame->data()[i * w + j] = *(data_new + 2 * i * w + j);
+    }
   }
   return true;
 }
@@ -92,11 +95,14 @@ bool unpack_right_img_pixels(
     const void *data, const StreamRequest &request, Streams::frame_t *frame) {
   CHECK_NOTNULL(frame);
   CHECK_EQ(request.format, Format::YUYV);
-  CHECK_EQ(frame->format(), Format::GREY);
+  CHECK_EQ(frame->format(), Format::YUYV);
   auto data_new = reinterpret_cast<const std::uint8_t *>(data);
-  std::size_t n = frame->width() * frame->height();
-  for (std::size_t i = 0; i < n; i++) {
-    frame->data()[i] = *(data_new + (i * 2 + 1));
+  std::size_t w = frame->width() * 2;
+  std::size_t h = frame->height();
+  for(std::size_t i = 0; i < h; i++) {
+    for(std::size_t j = 0; j < w; j++) {
+      frame->data()[i * w + j] = *(data_new + (2 * i + 1) * w + j);
+    }
   }
   return true;
 }
@@ -142,13 +148,13 @@ bool Streams::PushStream(const Capabilities &capability, const void *data) {
   switch (capability) {
     case Capabilities::STEREO: {
       // alloc left
-      AllocStreamData(Stream::LEFT, request, Format::GREY);
+      AllocStreamData(Stream::LEFT, request, Format::YUYV);
       auto &&left_data = stream_datas_map_[Stream::LEFT].back();
       // unpack img data
       if (unpack_img_data_map_[Stream::LEFT](
              data, request, left_data.img.get())) {
         // alloc right
-        AllocStreamData(Stream::RIGHT, request, Format::GREY);
+        AllocStreamData(Stream::RIGHT, request, Format::YUYV);
         auto &&right_data = stream_datas_map_[Stream::RIGHT].back();
         *right_data.img = *left_data.img;
         // unpack frame
@@ -281,7 +287,7 @@ void Streams::AllocStreamData(
   }
   if (!data.frame) {
     data.frame = std::make_shared<frame_t>(
-        request.width, request.height, format, nullptr);
+        request.width / 2, request.height, format, nullptr);
   }
   stream_datas_map_[stream].push_back(data);
 }

diff --git a/src/public/utils.cc b/src/public/utils.cc
index ad8b97a..c8d2d77 100644
--- a/src/public/utils.cc
+++ b/src/public/utils.cc
@@ -32,7 +32,7 @@ std::shared_ptr<Device> select() {
     LOG(ERROR) << "No MYNT EYE devices :(";
     return nullptr;
   }
-
+/*
   LOG(INFO) << "MYNT EYE devices:";
   for (size_t i = 0; i < n; i++) {
     auto &&device = devices[i];
@@ -40,7 +40,7 @@ std::shared_ptr<Device> select() {
               << ", name: " << device->GetInfo(Info::DEVICE_NAME)
               << ", sn: " << device->GetInfo(Info::SERIAL_NUMBER);
   }
-
+*/
   std::shared_ptr<Device> device = nullptr;
   if (n <= 1) {
     device = devices[0];
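
Note for reviewers (not part of the patch): the sketch below illustrates the split scheme this change adopts, assuming the STANDARD request above now delivers a single 1280x480 YUYV frame with the left and right views side by side. Each half keeps its full 2-bytes-per-pixel YUYV rows, as in the new unpack_left_img_pixels / unpack_right_img_pixels loops, and is converted to BGR only for display, as camera.cc now does with CV_8UC2 and cv::COLOR_YUV2BGR_YUY2. Buffer names and sizes here are illustrative.

#include <cstdint>
#include <vector>

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

int main() {
  const int src_w = 1280, src_h = 480;  // combined side-by-side YUYV frame
  const int half_w = src_w / 2;         // 640 pixels per eye
  const int row_bytes = half_w * 2;     // YUYV: 2 bytes per pixel

  // Stand-in for the raw USB payload: src_h rows of src_w * 2 bytes each.
  std::vector<std::uint8_t> raw(static_cast<std::size_t>(src_w) * 2 * src_h, 0);

  // Copy whole YUYV rows per half, using the same indexing as the new
  // unpack_left_img_pixels / unpack_right_img_pixels loops (w == row_bytes):
  // source row i is 2 * row_bytes long, and the right view starts row_bytes
  // bytes into it.
  std::vector<std::uint8_t> left(static_cast<std::size_t>(row_bytes) * src_h);
  std::vector<std::uint8_t> right(left.size());
  for (int i = 0; i < src_h; i++) {
    for (int j = 0; j < row_bytes; j++) {
      left[i * row_bytes + j] = raw[2 * i * row_bytes + j];
      right[i * row_bytes + j] = raw[(2 * i + 1) * row_bytes + j];
    }
  }

  // Wrap each half as a 2-channel Mat and convert only for display; camera.cc
  // does the same conversion after hconcat-ing the two halves.
  cv::Mat left_yuyv(src_h, half_w, CV_8UC2, left.data());
  cv::Mat right_yuyv(src_h, half_w, CV_8UC2, right.data());
  cv::Mat left_bgr, right_bgr;
  cv::cvtColor(left_yuyv, left_bgr, cv::COLOR_YUV2BGR_YUY2);
  cv::cvtColor(right_yuyv, right_bgr, cv::COLOR_YUV2BGR_YUY2);
  return 0;
}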