Add some resolution, format and frame rate.
This commit is contained in:
parent
b139fd21c3
commit
33d3f15b86
@@ -76,6 +76,8 @@ enum class Capabilities : std::uint8_t {
  STEREO,
  /** Provides color stream */
  COLOR,
  /** Provides stereo color stream */
  STEREO_COLOR,
  /** Provides depth stream */
  DEPTH,
  /** Provides point cloud stream */
@@ -227,6 +229,40 @@ enum class AddOns : std::uint8_t {
  LAST
};

/**
 * @ingroup enumerations
 * @brief Camera supported resolution.
 */
enum class Resolution : std::uint8_t {
  /** 480x752 */
  RES_480x752,
  /** 1280x400 */
  RES_1280x400,
  /** 2560x800 */
  RES_2560x800,
  /** Last guard */
  LAST
};

/**
 * @ingroup enumerations
 * @brief Camera supported frame rate.
 */
enum class FrameRate : std::uint8_t {
  /** 10 fps */
  RATE_10_FPS,
  /** 20 fps */
  RATE_20_FPS,
  /** 25 fps */
  RATE_25_FPS,
  /** 30 fps */
  RATE_30_FPS,
  /** 60 fps */
  RATE_60_FPS,
  /** Last guard */
  LAST
};

#define MYNTEYE_ENUM_HELPERS(TYPE) \
  MYNTEYE_API const char *to_string(const TYPE &value); \
  inline bool is_valid(const TYPE &value) { \
@@ -250,6 +286,8 @@ MYNTEYE_ENUM_HELPERS(Info)
MYNTEYE_ENUM_HELPERS(Option)
MYNTEYE_ENUM_HELPERS(Source)
MYNTEYE_ENUM_HELPERS(AddOns)
MYNTEYE_ENUM_HELPERS(Resolution)
MYNTEYE_ENUM_HELPERS(FrameRate)

#undef MYNTEYE_ENUM_HELPERS
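Since Resolution and FrameRate are now passed through MYNTEYE_ENUM_HELPERS, they pick up the same to_string/is_valid helpers as the other enums. A minimal sketch of using them (the header path, the mynteye namespace behind MYNTEYE_BEGIN_NAMESPACE, and the matching to_string definitions in types.cc are assumed, not shown in this diff):

    #include <iostream>
    #include "mynteye/types.h"  // assumed header location for the enums and helpers

    int main() {
      using namespace mynteye;  // assumed namespace behind MYNTEYE_BEGIN_NAMESPACE
      Resolution res = Resolution::RES_1280x400;
      FrameRate rate = FrameRate::RATE_30_FPS;
      if (is_valid(res) && is_valid(rate)) {
        // prints whatever names the SDK's to_string implementation returns
        std::cout << to_string(res) << " @ " << to_string(rate) << std::endl;
      }
      return 0;
    }
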
@@ -266,6 +304,8 @@ enum class Format : std::uint32_t {
  GREY = MYNTEYE_FOURCC('G', 'R', 'E', 'Y'),
  /** YUV 4:2:2, 16 bits per pixel */
  YUYV = MYNTEYE_FOURCC('Y', 'U', 'Y', 'V'),
  /** RGB 8:8:8, 24 bits per pixel */
  RGB888 = MYNTEYE_FOURCC('R', 'G', 'B', '3'),
  /** Last guard */
  LAST
};
@@ -293,6 +333,52 @@ struct MYNTEYE_API StreamRequest {
  /** Stream frames per second (unused) */
  std::uint16_t fps;

  StreamRequest() {}

  StreamRequest(
      std::uint16_t width, std::uint16_t height, Format format,
      std::uint16_t fps)
      : width(width), height(height), format(format), fps(fps) {}

  StreamRequest(Resolution res, Format format, FrameRate rate)
      : format(format) {
    switch (res) {
      case Resolution::RES_480x752:
        width = 480, height = 752;
        break;
      case Resolution::RES_1280x400:
        width = 1280, height = 400;
        break;
      case Resolution::RES_2560x800:
        width = 2560, height = 800;
        break;
      default:
        width = 480, height = 752;
        break;
    }

    switch (rate) {
      case FrameRate::RATE_10_FPS:
        fps = 10;
        break;
      case FrameRate::RATE_20_FPS:
        fps = 20;
        break;
      case FrameRate::RATE_25_FPS:
        fps = 25;
        break;
      case FrameRate::RATE_30_FPS:
        fps = 30;
        break;
      case FrameRate::RATE_60_FPS:
        fps = 60;
        break;
      default:
        fps = 25;
        break;
    }
  }

  bool operator==(const StreamRequest &other) const {
    return width == other.width && height == other.height &&
           format == other.format && fps == other.fps;
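The new StreamRequest constructor maps the Resolution and FrameRate enums onto the raw width/height/fps fields, so an enum-based request and a value-based request describe the same stream. A minimal sketch of that equivalence (header path and namespace are assumed from this diff):

    #include <cassert>
    #include "mynteye/types.h"  // assumed header providing StreamRequest and the enums

    int main() {
      using namespace mynteye;  // assumed namespace behind MYNTEYE_BEGIN_NAMESPACE
      // The same 2560x800 YUYV stream at 20 fps, expressed both ways.
      StreamRequest by_values(2560, 800, Format::YUYV, 20);
      StreamRequest by_enums(Resolution::RES_2560x800, Format::YUYV, FrameRate::RATE_20_FPS);
      assert(by_values == by_enums);  // operator== compares width, height, format and fps
      return 0;
    }
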
@@ -24,7 +24,8 @@ int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api)
    return 1;

  api->SetStreamRequest(
      Resolution::RES_2560x800, Format::YUYV, FrameRate::RATE_20_FPS);
  // api->SetOptionValue(Option::FRAME_RATE, 25);
  // api->SetOptionValue(Option::IMU_FREQUENCY, 500);
  api->SetOptionValue(Option::IR_CONTROL, 80);
@@ -105,7 +106,7 @@ int main(int argc, char *argv[]) {

    auto &&motion_datas = api->GetMotionDatas();
    motion_count += motion_datas.size();
    for (auto &&data : motion_datas) {
      LOG(INFO) << ", timestamp: " << data.imu->timestamp
                << ", accel_x: " << data.imu->accel[0]
                << ", accel_y: " << data.imu->accel[1]
@@ -50,6 +50,8 @@ int main(int argc, char *argv[]) {
  // device->RunOptionAction(Option::ZERO_DRIFT_CALIBRATION);

  std::size_t left_count = 0;
  device->SetStreamRequest(
      Resolution::RES_2560x800, Format::YUYV, FrameRate::RATE_30_FPS);
  device->SetStreamCallback(
      Stream::LEFT, [&left_count](const device::StreamData &data) {
        CHECK_NOTNULL(data.img);
@@ -75,7 +77,7 @@ int main(int argc, char *argv[]) {
  device->SetMotionCallback([&imu_count](const device::MotionData &data) {
    CHECK_NOTNULL(data.imu);
    ++imu_count;
    VLOG(2) << "Imu count: " << imu_count;
    VLOG(2) << ", timestamp: " << data.imu->timestamp
            << ", accel_x: " << data.imu->accel[0]
            << ", accel_y: " << data.imu->accel[1]
@@ -101,7 +103,7 @@ int main(int argc, char *argv[]) {

    auto &&motion_datas = device->GetMotionDatas();
    motion_count += motion_datas.size();
    for (auto &&data : motion_datas) {
      LOG(INFO) << ", timestamp: " << data.imu->timestamp
                << ", accel_x: " << data.imu->accel[0]
                << ", accel_y: " << data.imu->accel[1]
@@ -108,7 +108,7 @@ int main(int argc, char *argv[]) {
  const auto frame_empty = [&frame]() { return frame == nullptr; };

  uvc::set_device_mode(
      *device, 1280, 480, static_cast<int>(Format::YUYV), 25,
      *device, 1280, 400, static_cast<int>(Format::RGB888), 20,
      [&mtx, &cv, &frame, &frame_ready](
          const void *data, std::function<void()> continuation) {
        // reinterpret_cast<const std::uint8_t *>(data);
@@ -143,8 +143,8 @@ int main(int argc, char *argv[]) {
    }

    // only latest frame is valid
    cv::Mat img(480, 1280, CV_8UC2, const_cast<void *>(frame->data));
    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
    cv::Mat img(400, 1280, CV_8UC3, const_cast<void *>(frame->data));
    // cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
    cv::imshow("frame", img);

    frame = nullptr;
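With the sample switched from YUYV at 1280x480 to RGB888 at 1280x400, the raw frame wraps directly into a 3-channel Mat and the YUYV-to-BGR conversion is no longer needed. A small hedged helper capturing that mapping (the function name is illustrative, not part of the SDK; it mirrors the frame2mat change further down in this commit):

    #include <opencv2/core/core.hpp>
    #include "mynteye/types.h"  // assumed header providing Format

    // Pick the cv::Mat wrapper that matches a raw frame buffer of the given format.
    cv::Mat wrap_frame(int height, int width, mynteye::Format format, void *data) {
      switch (format) {
        case mynteye::Format::YUYV:    // 16 bits per pixel; convert with cv::COLOR_YUV2BGR_YUY2 before display
          return cv::Mat(height, width, CV_8UC2, data);
        case mynteye::Format::RGB888:  // 24 bits per pixel; already three channels
          return cv::Mat(height, width, CV_8UC3, data);
        default:                       // GREY and other single-channel formats
          return cv::Mat(height, width, CV_8UC1, data);
      }
    }
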
@@ -280,6 +280,11 @@ bool API::Supports(const AddOns &addon) const {
  return device_->Supports(addon);
}

void API::SetStreamRequest(
    const Resolution &res, const Format &format, const FrameRate &rate) {
  device_->SetStreamRequest(res, format, rate);
}

const std::vector<StreamRequest> &API::GetStreamRequests(
    const Capabilities &capability) const {
  return device_->GetStreamRequests(capability);
@@ -69,7 +69,7 @@ struct MYNTEYE_API MotionData {

  bool operator==(const MotionData &other) const {
    if (imu && other.imu) {
      return imu->timestamp == other.imu->timestamp;
    }
    return false;
  }
@@ -144,6 +144,11 @@ class MYNTEYE_API API {
   */
  bool Supports(const AddOns &addon) const;

  /**
   * Set the stream request.
   */
  void SetStreamRequest(
      const Resolution &res, const Format &format, const FrameRate &rate);
  /**
   * Get all stream requests of the capability.
   */
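SetStreamRequest only records which of the supported requests should be used; GetStreamRequests still enumerates every combination the capability offers. A short usage sketch against the API class (the header path and namespace are assumptions; the request values come from the tables added in this commit):

    #include "mynteye/api.h"  // assumed header for the API class

    int main(int argc, char *argv[]) {
      using namespace mynteye;  // assumed namespace
      auto &&api = API::Create(argc, argv);
      if (!api)
        return 1;
      // Select one of the supported STEREO_COLOR modes before streaming starts.
      api->SetStreamRequest(
          Resolution::RES_1280x400, Format::RGB888, FrameRate::RATE_60_FPS);
      // The full list of supported requests is still available for inspection.
      for (auto &&request : api->GetStreamRequests(Capabilities::STEREO_COLOR)) {
        (void)request;  // each request carries width, height, format and fps
      }
      return 0;
    }
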
@@ -11,14 +11,10 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "api/synthetic.h"

#include <glog/logging.h>

#include <algorithm>
#include <functional>
#include <stdexcept>
#include <opencv2/imgproc/imgproc.hpp>

#include "api/plugin.h"
#include "api/processor/depth_processor.h"
#include "api/processor/disparity_normalized_processor.h"
@@ -27,6 +23,7 @@
#include "api/processor/points_processor.h"
#include "api/processor/processor.h"
#include "api/processor/rectify_processor.h"
#include "api/synthetic.h"
#include "device/device.h"

#define RECTIFY_PROC_PERIOD 0
@@ -42,9 +39,15 @@ namespace {
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
  // TODO(JohnZhao) Support different format frame to cv::Mat
  CHECK_EQ(frame->format(), Format::YUYV);
  cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
  cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
  return img;
  if (frame->format() == Format::YUYV) {
    cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
    return img;
  } else if (frame->format() == Format::RGB888) {
    return cv::Mat(frame->height(), frame->width(), CV_8UC3, frame->data());
  } else {
    return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
  }
}

api::StreamData data2api(const device::StreamData &data) {
@@ -100,10 +100,10 @@ std::shared_ptr<Device> Device::Create(
    return std::make_shared<StandardDevice>(device);
  } else if (strings::starts_with(name, "MYNT-EYE-")) {
    // TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
    std::string model_s = name.substr(9,5);
    std::string model_s = name.substr(9, 5);
    VLOG(2) << "MYNE EYE Model: " << model_s;
    DeviceModel model(model_s);
    if(model.type == 'S') {
    if (model.type == 'S') {
      switch (model.custom_code) {
        case '0':
          return std::make_shared<StandardDevice>(device);
@@ -432,13 +432,22 @@ std::vector<device::MotionData> Device::GetMotionDatas() {
  return motions_->GetMotionDatas();
}

void Device::SetStreamRequest(
    const Resolution &res, const Format &format, const FrameRate &rate) {
  StreamRequest request(res, format, rate);
  request_ = request;
}

const StreamRequest &Device::GetStreamRequest(const Capabilities &capability) {
  try {
    return stream_config_requests_.at(capability);
  } catch (const std::out_of_range &e) {
    auto &&requests = GetStreamRequests(capability);
    if (requests.size() >= 1) {
      VLOG(2) << "Select the first one stream request of " << capability;
      for (auto &&request : requests) {
        if (request == request_)
          return request;
      }
      return requests[0];
    } else {
      LOG(FATAL) << "Please config the stream request of " << capability;
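If no request was configured explicitly for the capability, GetStreamRequest falls back to the supported list: it first looks for an entry equal to the request set via SetStreamRequest, and otherwise takes the first supported entry. A short sketch of the resulting behaviour (device creation elided; the request values come from the stream_requests_map added below):

    // Configure an enum-based request, then let the device resolve it.
    device->SetStreamRequest(
        Resolution::RES_2560x800, Format::YUYV, FrameRate::RATE_30_FPS);
    auto &&request = device->GetStreamRequest(Capabilities::STEREO_COLOR);
    // request is {2560, 800, Format::YUYV, 30} here, because that combination
    // appears in stream_requests_map; an unsupported combination would fall
    // back to the first listed request, {1280, 400, Format::YUYV, 20}.
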
@@ -455,53 +464,48 @@ void Device::StartVideoStreaming() {
  streams_ = std::make_shared<Streams>(GetKeyStreams());

  // if stream capabilities are supported with subdevices of device_
  /*
  Capabilities stream_capabilities[] = {
    Capabilities::STEREO,
    Capabilities::COLOR,
    Capabilities::DEPTH,
    Capabilities::POINTS,
    Capabilities::FISHEYE,
    Capabilities::INFRARED,
    Capabilities::INFRARED2
  };
      Capabilities::STEREO, Capabilities::COLOR,
      Capabilities::STEREO_COLOR, Capabilities::DEPTH,
      Capabilities::POINTS, Capabilities::FISHEYE,
      Capabilities::INFRARED, Capabilities::INFRARED2};
  for (auto &&capability : stream_capabilities) {
  }
  */
  if (Supports(Capabilities::STEREO)) {
    // do stream request selection if more than one request of each stream
    auto &&stream_request = GetStreamRequest(Capabilities::STEREO);
    if (Supports(capability)) {
      // do stream request selection if more than one request of each stream
      auto &&stream_request = GetStreamRequest(capability);

    streams_->ConfigStream(Capabilities::STEREO, stream_request);
    uvc::set_device_mode(
        *device_, stream_request.width, stream_request.height,
        static_cast<int>(stream_request.format), stream_request.fps,
        [this](const void *data, std::function<void()> continuation) {
          // drop the first stereo stream data
          static std::uint8_t drop_count = 1;
          if (drop_count > 0) {
            --drop_count;
            continuation();
            return;
          }
          // auto &&time_beg = times::now();
          {
            std::lock_guard<std::mutex> _(mtx_streams_);
            if (streams_->PushStream(Capabilities::STEREO, data)) {
              CallbackPushedStreamData(Stream::LEFT);
              CallbackPushedStreamData(Stream::RIGHT);
      streams_->ConfigStream(capability, stream_request);
      uvc::set_device_mode(
          *device_, stream_request.width, stream_request.height,
          static_cast<int>(stream_request.format), stream_request.fps,
          [this, capability](
              const void *data, std::function<void()> continuation) {
            // drop the first stereo stream data
            static std::uint8_t drop_count = 1;
            if (drop_count > 0) {
              --drop_count;
              continuation();
              return;
            }
            }
          }
          continuation();
          OnStereoStreamUpdate();
          // VLOG(2) << "Stereo video callback cost "
          // << times::count<times::milliseconds>(times::now() - time_beg)
          // << " ms";
        });
  } else {
    LOG(FATAL) << "Not any stream capabilities are supported by this device";
            // auto &&time_beg = times::now();
            {
              std::lock_guard<std::mutex> _(mtx_streams_);
              if (streams_->PushStream(capability, data)) {
                CallbackPushedStreamData(Stream::LEFT);
                CallbackPushedStreamData(Stream::RIGHT);
              }
            }
            continuation();
            OnStereoStreamUpdate();
            // VLOG(2) << "Stereo video callback cost "
            // << times::count<times::milliseconds>(times::now() - time_beg)
            // << " ms";
          });
    } else {
      // LOG(FATAL) << "Not any stream capabilities are supported by this
      // device";
    }
  }

  uvc::start_streaming(*device_, 0);
  video_streaming_ = true;
}
@@ -103,6 +103,11 @@ class MYNTEYE_API Device {
   */
  bool Supports(const AddOns &addon) const;

  /**
   * Set the stream request.
   */
  void SetStreamRequest(
      const Resolution &res, const Format &format, const FrameRate &rate);
  /**
   * Get all stream requests of the capability.
   */
@@ -286,6 +291,7 @@ class MYNTEYE_API Device {

 private:
  Model model_;
  StreamRequest request_;
  std::shared_ptr<uvc::device> device_;
  std::shared_ptr<DeviceInfo> device_info_;
@@ -19,7 +19,7 @@ const std::map<Model, StreamSupports> stream_supports_map = {
    {Model::STANDARD, {Stream::LEFT, Stream::RIGHT}}};

const std::map<Model, CapabilitiesSupports> capabilities_supports_map = {
    {Model::STANDARD, {Capabilities::STEREO, Capabilities::IMU}}};
    {Model::STANDARD, {Capabilities::STEREO_COLOR, Capabilities::IMU}}};

const std::map<Model, OptionSupports> option_supports_map = {
    {Model::STANDARD,
@@ -31,6 +31,19 @@ const std::map<Model, OptionSupports> option_supports_map = {
const std::map<Model, std::map<Capabilities, StreamRequests>>
    stream_requests_map = {
        {Model::STANDARD,
         {{Capabilities::STEREO, {{1280, 480, Format::YUYV, 25}}}}}};
         {{Capabilities::STEREO, {{480, 752, Format::YUYV, 25}}},
          {Capabilities::STEREO_COLOR,
           {{1280, 400, Format::YUYV, 20},
            {1280, 400, Format::YUYV, 30},
            {1280, 400, Format::YUYV, 60},
            {2560, 800, Format::YUYV, 10},
            {2560, 800, Format::YUYV, 20},
            {2560, 800, Format::YUYV, 30},
            {1280, 400, Format::RGB888, 20},
            {1280, 400, Format::RGB888, 30},
            {1280, 400, Format::RGB888, 60},
            {2560, 800, Format::RGB888, 10},
            {2560, 800, Format::RGB888, 20},
            {2560, 800, Format::RGB888, 30}}}}}};

MYNTEYE_END_NAMESPACE
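The STEREO_COLOR table above is what the enum-based request resolves against: each entry is a StreamRequest of {width, height, format, fps}. A small hedged check that a given Resolution/Format/FrameRate combination is actually listed (the map and type names are taken from this diff; the surrounding includes and namespace are assumed):

    // Build the enum-based request and look for it in the supported list.
    StreamRequest wanted(Resolution::RES_2560x800, Format::RGB888, FrameRate::RATE_30_FPS);
    bool supported = false;
    for (auto &&request :
         stream_requests_map.at(Model::STANDARD).at(Capabilities::STEREO_COLOR)) {
      if (request == wanted) {
        supported = true;  // {2560, 800, Format::RGB888, 30} is in the table above
        break;
      }
    }
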
@@ -27,13 +27,18 @@ MYNTEYE_BEGIN_NAMESPACE
using StreamSupports = std::set<Stream>;
using CapabilitiesSupports = std::set<Capabilities>;
using OptionSupports = std::set<Option>;

using ResolutionSupports = std::set<Resolution>;
using FrameRateSupports = std::set<FrameRate>;
extern const std::map<Model, StreamSupports> stream_supports_map;
extern const std::map<Model, CapabilitiesSupports> capabilities_supports_map;
extern const std::map<Model, OptionSupports> option_supports_map;
extern const std::map<Model, ResolutionSupports> resolution_supports_map;

using StreamRequests = std::vector<StreamRequest>;

extern const std::map<Model, std::map<Resolution, FrameRateSupports>>
    framerate_Supports_supports_map;

extern const std::map<Model, std::map<Capabilities, StreamRequests>>
    stream_requests_map;
@@ -85,8 +85,8 @@ bool unpack_left_img_pixels(
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t w = frame->width() * 2;
  std::size_t h = frame->height();
  for(std::size_t i = 0; i < h; i++) {
    for(std::size_t j = 0; j < w; j++) {
  for (std::size_t i = 0; i < h; i++) {
    for (std::size_t j = 0; j < w; j++) {
      frame->data()[i * w + j] = *(data_new + 2 * i * w + j);
    }
  }
@@ -101,8 +101,8 @@ bool unpack_right_img_pixels(
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t w = frame->width() * 2;
  std::size_t h = frame->height();
  for(std::size_t i = 0; i < h; i++) {
    for(std::size_t j = 0; j < w; j++) {
  for (std::size_t i = 0; i < h; i++) {
    for (std::size_t j = 0; j < w; j++) {
      frame->data()[i * w + j] = *(data_new + (2 * i + 1) * w + j);
    }
  }
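Both unpackers read from the same combined buffer: the stereo pair arrives row-interleaved, so row 2*i of the raw data belongs to the left image and row 2*i+1 to the right image. A small standalone illustration of that layout (buffer sizes are illustrative, not the device's):

    #include <cstdint>
    #include <vector>

    int main() {
      const std::size_t w = 16, h = 4;              // bytes per row and rows per eye (illustrative)
      std::vector<std::uint8_t> packed(2 * h * w);  // row-interleaved left/right, as delivered
      std::vector<std::uint8_t> left(h * w), right(h * w);
      for (std::size_t i = 0; i < h; i++) {
        for (std::size_t j = 0; j < w; j++) {
          left[i * w + j] = packed[2 * i * w + j];         // even rows -> left
          right[i * w + j] = packed[(2 * i + 1) * w + j];  // odd rows -> right
        }
      }
      return 0;
    }
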
@@ -116,7 +116,7 @@ Streams::Streams(const std::vector<Stream> key_streams)
      stream_capabilities_(
          {Capabilities::STEREO, Capabilities::COLOR, Capabilities::DEPTH,
           Capabilities::POINTS, Capabilities::FISHEYE, Capabilities::INFRARED,
           Capabilities::INFRARED2}),
           Capabilities::INFRARED2, Capabilities::STEREO_COLOR}),
      unpack_img_data_map_(
          {{Stream::LEFT, unpack_stereo_img_data},
           {Stream::RIGHT, unpack_stereo_img_data}}),
@@ -148,7 +148,7 @@ bool Streams::PushStream(const Capabilities &capability, const void *data) {
  auto &&request = GetStreamConfigRequest(capability);
  bool pushed = false;
  switch (capability) {
    case Capabilities::STEREO: {
    case Capabilities::STEREO_COLOR: {
      // alloc left
      AllocStreamData(Stream::LEFT, request, Format::YUYV);
      auto &&left_data = stream_datas_map_[Stream::LEFT].back();
@@ -281,7 +281,7 @@ void Streams::AllocStreamData(
  }

  if (stream == Stream::LEFT || stream == Stream::RIGHT) {
    if(!data.img) {
    if (!data.img) {
      data.img = std::make_shared<ImgData>();
    }
  } else {
@@ -2,8 +2,8 @@
---
in_left:
  -
    width: 752
    height: 480
    width: 1280
    height: 800
    fx: 7.3638305001095546e+02
    fy: 7.2350066150722432e+02
    cx: 3.5691961817119693e+02
@@ -13,8 +13,8 @@ in_left:
      0. ]
in_right:
  -
    width: 752
    height: 480
    width: 1280
    height: 800
    fx: 7.3638305001095546e+02
    fy: 7.2350066150722432e+02
    cx: 4.5668367112303980e+02