Change the way of splitting images
parent 6c25f06005
commit 659f03ac18
@@ -46,8 +46,7 @@ int main(int argc, char *argv[]) {
   device->SetOptionValue(Option::FRAME_RATE, 25);
   device->SetOptionValue(Option::IMU_FREQUENCY, 500);
   */
-  device->LogOptionInfos();
-
+  // device->LogOptionInfos();
   // device->RunOptionAction(Option::ZERO_DRIFT_CALIBRATION);

   std::size_t left_count = 0;
@@ -72,6 +71,7 @@ int main(int argc, char *argv[]) {
   });

   std::size_t imu_count = 0;
+  /*
   device->SetMotionCallback([&imu_count](const device::MotionData &data) {
     CHECK_NOTNULL(data.imu);
     ++imu_count;
@@ -86,11 +86,10 @@ int main(int argc, char *argv[]) {
        << ", gyro_z: " << data.imu->gyro[2]
        << ", temperature: " << data.imu->temperature;
   });
-
+  */
   // Enable this will cache the motion datas until you get them.
-  device->EnableMotionDatas();
-  device->Start(Source::ALL);
-
+  // device->EnableMotionDatas();
+  device->Start(Source::VIDEO_STREAMING);
   cv::namedWindow("frame");

   std::size_t motion_count = 0;
@@ -100,7 +99,7 @@ int main(int argc, char *argv[]) {

     device::StreamData left_data = device->GetLatestStreamData(Stream::LEFT);
     device::StreamData right_data = device->GetLatestStreamData(Stream::RIGHT);
-
+    /*
     auto &&motion_datas = device->GetMotionDatas();
     motion_count += motion_datas.size();
     for (auto &&data : motion_datas) {
@@ -114,16 +113,17 @@ int main(int argc, char *argv[]) {
          << ", gyro_z: " << data.imu->gyro[2]
          << ", temperature: " << data.imu->temperature;
     }
-
+    */
     cv::Mat left_img(
-        left_data.frame->height(), left_data.frame->width(), CV_8UC1,
+        left_data.frame->height(), left_data.frame->width(), CV_8UC2,
         left_data.frame->data());
     cv::Mat right_img(
-        right_data.frame->height(), right_data.frame->width(), CV_8UC1,
+        right_data.frame->height(), right_data.frame->width(), CV_8UC2,
         right_data.frame->data());

     cv::Mat img;
     cv::hconcat(left_img, right_img, img);
+    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
     cv::imshow("frame", img);

     char key = static_cast<char>(cv::waitKey(1));
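With the combined YUYV stream, each eye arrives as half-width packed YUYV data, so the sample wraps it in a 2-channel Mat and converts the concatenated image to BGR once for display. A minimal standalone sketch of that display path, assuming two 640x480 YUYV buffers; the helper name and parameters are illustrative, not part of the SDK:

#include <cstdint>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Display a stereo pair whose frames are stored as packed YUYV (2 bytes/pixel).
void show_stereo_yuyv(const std::uint8_t *left, const std::uint8_t *right,
                      int width, int height) {
  cv::Mat left_img(height, width, CV_8UC2, const_cast<std::uint8_t *>(left));
  cv::Mat right_img(height, width, CV_8UC2, const_cast<std::uint8_t *>(right));
  cv::Mat img;
  cv::hconcat(left_img, right_img, img);           // side by side, still YUYV
  cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);  // unpack to BGR for imshow
  cv::imshow("frame", img);
  cv::waitKey(1);
}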
@@ -87,7 +87,7 @@ Device::Device(const Model &model, std::shared_ptr<uvc::device> device)
       channels_(std::make_shared<Channels>(device)),
       motions_(std::make_shared<Motions>(channels_)) {
   VLOG(2) << __func__;
-  ReadAllInfos();
+  //ReadAllInfos();
 }

 Device::~Device() {
@@ -96,7 +96,7 @@ Device::~Device() {

 std::shared_ptr<Device> Device::Create(
     const std::string &name, std::shared_ptr<uvc::device> device) {
-  if (name == "MYNTEYE") {
+  if (name == "MYNTEYE" || name == "CX3-UVC") {
     return std::make_shared<StandardDevice>(device);
   } else if (strings::starts_with(name, "MYNT-EYE-")) {
     // TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
@@ -31,6 +31,6 @@ const std::map<Model, OptionSupports> option_supports_map = {
 const std::map<Model, std::map<Capabilities, StreamRequests>>
     stream_requests_map = {
         {Model::STANDARD,
-         {{Capabilities::STEREO, {{752, 480, Format::YUYV, 25}}}}}};
+         {{Capabilities::STEREO, {{1280, 480, Format::YUYV, 25}}}}}};

 MYNTEYE_END_NAMESPACE
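For reference, a quick size check of the new request (plain arithmetic, not SDK code): the device now delivers both eyes in one 1280x480 YUYV frame, and each eye becomes a 640x480 frame after splitting.

#include <cstddef>

// Illustrative size check for the combined stereo request.
constexpr std::size_t kWidth = 1280;       // both eyes side by side
constexpr std::size_t kHeight = 480;
constexpr std::size_t kBytesPerPixel = 2;  // packed YUYV
constexpr std::size_t kFrameBytes = kWidth * kHeight * kBytesPerPixel;
static_assert(kFrameBytes == 1228800, "one combined frame is ~1.2 MB");
static_assert(kWidth / 2 == 640, "each eye is 640x480 after the split");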
@@ -48,7 +48,7 @@ bool unpack_stereo_img_data(
   // << ", timestamp="<< std::dec << img_packet.timestamp
   // << ", exposure_time="<< std::dec << img_packet.exposure_time
   // << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
-
+  /*
   if (img_packet.header != 0x3B) {
     LOG(WARNING) << "Image packet header must be 0x3B, but 0x" << std::hex
                  << std::uppercase << std::setw(2) << std::setfill('0')
@@ -68,7 +68,7 @@ bool unpack_stereo_img_data(
                  << static_cast<int>(checksum) << " now";
     return false;
   }
-
+  */
   img->frame_id = img_packet.frame_id;
   img->timestamp = img_packet.timestamp;
   img->exposure_time = img_packet.exposure_time;
@@ -79,11 +79,14 @@ bool unpack_left_img_pixels(
     const void *data, const StreamRequest &request, Streams::frame_t *frame) {
   CHECK_NOTNULL(frame);
   CHECK_EQ(request.format, Format::YUYV);
-  CHECK_EQ(frame->format(), Format::GREY);
+  CHECK_EQ(frame->format(), Format::YUYV);
   auto data_new = reinterpret_cast<const std::uint8_t *>(data);
-  std::size_t n = frame->width() * frame->height();
-  for (std::size_t i = 0; i < n; i++) {
-    frame->data()[i] = *(data_new + (i * 2));
+  std::size_t w = frame->width() * 2;
+  std::size_t h = frame->height();
+  for(std::size_t i = 0; i < h; i++) {
+    for(std::size_t j = 0; j < w; j++) {
+      frame->data()[i * w + j] = *(data_new + 2 * i * w + j);
+    }
   }
   return true;
 }
@@ -92,11 +95,14 @@ bool unpack_right_img_pixels(
     const void *data, const StreamRequest &request, Streams::frame_t *frame) {
   CHECK_NOTNULL(frame);
   CHECK_EQ(request.format, Format::YUYV);
-  CHECK_EQ(frame->format(), Format::GREY);
+  CHECK_EQ(frame->format(), Format::YUYV);
   auto data_new = reinterpret_cast<const std::uint8_t *>(data);
-  std::size_t n = frame->width() * frame->height();
-  for (std::size_t i = 0; i < n; i++) {
-    frame->data()[i] = *(data_new + (i * 2 + 1));
+  std::size_t w = frame->width() * 2;
+  std::size_t h = frame->height();
+  for(std::size_t i = 0; i < h; i++) {
+    for(std::size_t j = 0; j < w; j++) {
+      frame->data()[i * w + j] = *(data_new + (2 * i + 1) * w + j);
+    }
   }
   return true;
 }
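The two unpack functions above perform the same row-wise copy with different column offsets: each source row of the combined frame is width * 2 * 2 bytes of packed YUYV, and the left and right images are its first and second halves. A hedged sketch of that split as a single helper; the function and its parameters are illustrative only, not SDK API:

#include <cstddef>
#include <cstdint>
#include <cstring>

// Copy one half of a side-by-side YUYV frame into a half-width buffer.
// src_width is the combined width in pixels; right selects the second half.
void split_yuyv_half(const std::uint8_t *src, std::uint8_t *dst,
                     std::size_t src_width, std::size_t height, bool right) {
  const std::size_t src_row = src_width * 2;       // 2 bytes per YUYV pixel
  const std::size_t dst_row = src_row / 2;         // half-width destination row
  const std::size_t offset = right ? dst_row : 0;  // 0 = left, dst_row = right
  for (std::size_t i = 0; i < height; i++) {
    std::memcpy(dst + i * dst_row, src + i * src_row + offset, dst_row);
  }
}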
@@ -142,13 +148,13 @@ bool Streams::PushStream(const Capabilities &capability, const void *data) {
   switch (capability) {
     case Capabilities::STEREO: {
       // alloc left
-      AllocStreamData(Stream::LEFT, request, Format::GREY);
+      AllocStreamData(Stream::LEFT, request, Format::YUYV);
       auto &&left_data = stream_datas_map_[Stream::LEFT].back();
       // unpack img data
       if (unpack_img_data_map_[Stream::LEFT](
               data, request, left_data.img.get())) {
         // alloc right
-        AllocStreamData(Stream::RIGHT, request, Format::GREY);
+        AllocStreamData(Stream::RIGHT, request, Format::YUYV);
         auto &&right_data = stream_datas_map_[Stream::RIGHT].back();
         *right_data.img = *left_data.img;
         // unpack frame
@@ -281,7 +287,7 @@ void Streams::AllocStreamData(
   }
   if (!data.frame) {
     data.frame = std::make_shared<frame_t>(
-        request.width, request.height, format, nullptr);
+        request.width / 2, request.height, format, nullptr);
   }
   stream_datas_map_[stream].push_back(data);
 }
@@ -32,7 +32,7 @@ std::shared_ptr<Device> select() {
     LOG(ERROR) << "No MYNT EYE devices :(";
     return nullptr;
   }
-
+  /*
   LOG(INFO) << "MYNT EYE devices:";
   for (size_t i = 0; i < n; i++) {
     auto &&device = devices[i];
@@ -40,7 +40,7 @@ std::shared_ptr<Device> select() {
              << ", name: " << device->GetInfo(Info::DEVICE_NAME)
              << ", sn: " << device->GetInfo(Info::SERIAL_NUMBER);
   }
-
+  */
   std::shared_ptr<Device> device = nullptr;
   if (n <= 1) {
     device = devices[0];