// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2017 Intel Corporation. All Rights Reserved.
#ifndef LIBREALSENSE_RS2_PROCESSING_HPP
#define LIBREALSENSE_RS2_PROCESSING_HPP
#include "rs_types.hpp"
#include "rs_frame.hpp"
#include "rs_options.hpp"
namespace rs2
{
/**
* The source used to generate frames, which is usually done by the low-level driver for each sensor. frame_source is one of the parameters
* of processing_block's callback function; it can be used to allocate new frames and, via frame_ready, to invoke another callback that
* notifies the application that a frame is ready. Please refer to the "video_processing_thread" code snippet in rs-measure.cpp for a detailed usage example.
*/
class frame_source
{
public:
/**
* Allocate a new video frame with the given parameters
*
* \param[in] profile     Stream profile of the frame to allocate.
* \param[in] original    Original frame. If new_bpp, new_width, new_height or new_stride is zero, the new frame is allocated based on the original frame's metadata. If frame_type is RS2_EXTENSION_DEPTH_FRAME, the original of the returned frame will be set to it.
* \param[in] new_bpp     Bits per pixel of the new frame.
* \param[in] new_width   Width of the new frame.
* \param[in] new_height  Height of the new frame.
* \param[in] new_stride  Stride of the new frame.
* \param[in] frame_type  Type of frame to create.
* \return The allocated frame
*/
frame allocate_video_frame(const stream_profile& profile,
const frame& original,
int new_bpp = 0,
int new_width = 0,
int new_height = 0,
int new_stride = 0,
rs2_extension frame_type = RS2_EXTENSION_VIDEO_FRAME) const
{
rs2_error* e = nullptr;
auto result = rs2_allocate_synthetic_video_frame(_source, profile.get(),
original.get(), new_bpp, new_width, new_height, new_stride, frame_type, &e);
error::handle(e);
return result;
}
/**
* Allocate a new motion frame with the given parameters
*
* \param[in] profile     Stream profile of the frame to allocate.
* \param[in] original    Original frame.
* \param[in] frame_type  Type of frame to create.
* \return The allocated frame
*/
frame allocate_motion_frame(const stream_profile& profile,
const frame& original,
rs2_extension frame_type = RS2_EXTENSION_MOTION_FRAME) const
{
rs2_error* e = nullptr;
auto result = rs2_allocate_synthetic_motion_frame(_source, profile.get(),
original.get(), frame_type, &e);
error::handle(e);
return result;
}
frame allocate_points(const stream_profile& profile,
const frame& original) const
{
rs2_error* e = nullptr;
auto result = rs2_allocate_points(_source, profile.get(), original.get(), &e);
error::handle(e);
return result;
}
/**
* Allocate a composite frame with the given parameters
*
* \param[in] frames Frame vector used to create the composite frame; the size of the composite frame will be the same as the vector size.
* \return The allocated composite frame
*/
frame allocate_composite_frame(std::vector<frame> frames) const
{
rs2_error* e = nullptr;
std::vector<rs2_frame*> refs(frames.size(), (rs2_frame*)nullptr);
for (size_t i = 0; i < frames.size(); i++)
std::swap(refs[i], frames[i].frame_ref);
auto result = rs2_allocate_composite_frame(_source, refs.data(), (int)refs.size(), &e);
error::handle(e);
return result;
}
/**
* Invoke the callback function to inform that the frame is ready.
*
* \param[in] result frame to send to the callback function.
*/
void frame_ready(frame result) const
{
rs2_error* e = nullptr;
rs2_synthetic_frame_ready(_source, result.get(), &e);
error::handle(e);
result.frame_ref = nullptr;
}
rs2_source* _source;
private:
template<class T>
friend class frame_processor_callback;
frame_source(rs2_source* source) : _source(source) {}
frame_source(const frame_source&) = delete;
};
template<class T>
class frame_processor_callback : public rs2_frame_processor_callback
{
T on_frame_function;
public:
explicit frame_processor_callback(T on_frame) : on_frame_function(on_frame) {}
void on_frame(rs2_frame* f, rs2_source * source) override
{
frame_source src(source);
frame frm(f);
on_frame_function(std::move(frm), src);
}
void release() override { delete this; }
};
class frame_queue
{
public:
/**
* Create a frame queue. Frame queues are the simplest cross-platform synchronization primitive provided by librealsense
* to help developers who are not using async APIs.
* \param[in] capacity    size of the frame queue
* \param[in] keep_frames if set to true, the queue automatically calls keep() on every frame enqueued into it.
*/
explicit frame_queue(unsigned int capacity, bool keep_frames = false) : _capacity(capacity), _keep(keep_frames)
{
rs2_error* e = nullptr;
_queue = std::shared_ptr<rs2_frame_queue>(
rs2_create_frame_queue(capacity, &e),
rs2_delete_frame_queue);
error::handle(e);
}
frame_queue() : frame_queue(1) {}
/**
* Enqueue a new frame into the queue
* \param[in] f - frame handle to enqueue (this operation passes ownership to the queue)
*/
void enqueue(frame f) const
{
if (_keep) f.keep();
rs2_enqueue_frame(f.frame_ref, _queue.get()); // noexcept
f.frame_ref = nullptr; // frame has been essentially moved from
}
/**
* wait until new frame becomes available in the queue and dequeue it
* \return frame handle to be released using rs2_release_frame
*/
frame wait_for_frame(unsigned int timeout_ms = 5000) const
{
rs2_error* e = nullptr;
auto frame_ref = rs2_wait_for_frame(_queue.get(), timeout_ms, &e);
error::handle(e);
return{ frame_ref };
}
/**
* poll if a new frame is available and dequeue if it is
* \param[out] f - frame handle
* \return true if new frame was stored to f
*/
template<typename T>
typename std::enable_if<std::is_base_of<rs2::frame, T>::value, bool>::type poll_for_frame(T* output) const
{
rs2_error* e = nullptr;
rs2_frame* frame_ref = nullptr;
auto res = rs2_poll_for_frame(_queue.get(), &frame_ref, &e);
error::handle(e);
frame f{ frame_ref };
if (res) *output = f;
return res > 0;
}
template<typename T>
typename std::enable_if<std::is_base_of<rs2::frame, T>::value, bool>::type try_wait_for_frame(T* output, unsigned int timeout_ms = 5000) const
{
rs2_error* e = nullptr;
rs2_frame* frame_ref = nullptr;
auto res = rs2_try_wait_for_frame(_queue.get(), timeout_ms, &frame_ref, &e);
error::handle(e);
frame f{ frame_ref };
if (res) *output = f;
return res > 0;
}
/**
* Equivalent to calling enqueue()
*/
void operator()(frame f) const
{
enqueue(std::move(f));
}
/**
* Return the number of frames currently held in the queue
* \return number of enqueued frames
*/
size_t size() const
{
rs2_error* e = nullptr;
auto res = rs2_frame_queue_size(_queue.get(), &e);
error::handle(e);
return static_cast<size_t>(res);
}
/**
* Return the capacity of the queue
* \return capacity size
*/
size_t capacity() const { return _capacity; }
/**
* Return whether or not the queue calls keep on enqueued frames
* \return keeping frames
*/
bool keep_frames() const { return _keep; }
/**
* Provide a getter for the underlying rs2_frame_queue object. Used to invoke C-API calls that require C-type parameters in their signature
* \return shared pointer to the underlying rs2_frame_queue
*/
std::shared_ptr<rs2_frame_queue> get() { return _queue; }
private:
std::shared_ptr<rs2_frame_queue> _queue;
size_t _capacity;
bool _keep;
};
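// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch, assuming the caller already holds a frame (for example, obtained from a
// sensor or pipeline callback): frames are enqueued into a frame_queue by a producer and
// consumed elsewhere with poll_for_frame() / wait_for_frame(). The helper name is hypothetical.
inline void example_frame_queue_usage(frame f)
{
    frame_queue queue(16, true);     // keep_frames=true calls keep() on every enqueued frame
    queue.enqueue(f);                // ownership of the enqueued handle passes to the queue

    frame out;
    if (queue.poll_for_frame(&out))  // non-blocking; returns true if a frame was dequeued
    {
        // ... consume "out" here; it is released automatically when it goes out of scope
    }
}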
/**
* Defines the processing-block workflow; inherit from this class to create your own processing_block. Please refer to the viewer class in examples.hpp for a detailed usage example.
*/
class processing_block : public options
{
public:
using options::supports;
/**
* Start the processing block with a callback function, on_frame, that informs the application when a processed frame is ready.
*
* \param[in] on_frame callback invoked when a processed frame is ready.
*/
template<class S>
void start(S on_frame)
{
rs2_error* e = nullptr;
rs2_start_processing(get(), new frame_callback<S>(on_frame), &e);
error::handle(e);
}
/**
* Equivalent to calling start()
*
* \param[in] on_frame callback invoked when a processed frame is ready.
* \return reference to the callback, allowing further chaining.
*/
template<class S>
S& operator>>(S& on_frame)
{
start(on_frame);
return on_frame;
}
/**
* Ask the processing block to process the frame
*
* \param[in] f frame to be processed.
*/
void invoke(frame f) const
{
rs2_frame* ptr = nullptr;
std::swap(f.frame_ref, ptr);
rs2_error* e = nullptr;
rs2_process_frame(get(), ptr, &e);
error::handle(e);
}
/**
* Constructor from an already-created low-level processing block.
*
* \param[in] block - low-level rs2_processing_block created beforehand.
*/
processing_block(std::shared_ptr<rs2_processing_block> block)
: options((rs2_options*)block.get()), _block(block)
{
}
/**
* Constructor from a processing function.
*
* \param[in] processing_function - callable implementing on_frame of the rs2_frame_processor_callback structure; it is called back by the invoke function.
*/
template<class S>
processing_block(S processing_function)
{
rs2_error* e = nullptr;
_block = std::shared_ptr<rs2_processing_block>(
rs2_create_processing_block(new frame_processor_callback<S>(processing_function), &e),
rs2_delete_processing_block);
options::operator=(_block);
error::handle(e);
}
operator rs2_options*() const { return (rs2_options*)get(); }
rs2_processing_block* get() const { return _block.get(); }
/**
* Check if a specific camera info field is supported.
* \param[in] info the parameter to check for support
* \return true if the parameter both exists and is well-defined for the specific processing_block
*/
bool supports(rs2_camera_info info) const
{
rs2_error* e = nullptr;
auto is_supported = rs2_supports_processing_block_info(_block.get(), info, &e);
error::handle(e);
return is_supported > 0;
}
/**
* Retrieve camera specific information, like versions of various internal components.
* \param[in] info camera info type to retrieve
* \return the requested camera info string, in a format specific to the processing_block model
*/
const char* get_info(rs2_camera_info info) const
{
rs2_error* e = nullptr;
auto result = rs2_get_processing_block_info(_block.get(), info, &e);
error::handle(e);
return result;
}
protected:
void register_simple_option(rs2_option option_id, option_range range) {
rs2_error * e = nullptr;
rs2_processing_block_register_simple_option(_block.get(), option_id,
range.min, range.max, range.step, range.def, &e);
error::handle(e);
}
std::shared_ptr<rs2_processing_block> _block;
};
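// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch of a custom processing block: the lambda receives the input frame together
// with a frame_source, allocates an output frame of the same layout, copies the pixels through
// unmodified, and signals completion with frame_ready(). The helper name and the byte-wise
// pass-through copy are illustrative only.
inline frame example_passthrough_block(frame input)
{
    frame_queue output(1);
    processing_block pb(
        [](frame f, const frame_source& src)
        {
            auto vf = f.as<video_frame>();
            // Allocate a new frame based on the input frame's profile and metadata
            auto out = src.allocate_video_frame(vf.get_profile(), f);
            auto dst = static_cast<unsigned char*>(const_cast<void*>(out.get_data()));
            auto in  = static_cast<const unsigned char*>(f.get_data());
            int size = vf.get_height() * vf.get_stride_in_bytes();
            for (int i = 0; i < size; i++) dst[i] = in[i]; // pass the pixels through untouched
            src.frame_ready(out);                          // hand the result back to the pipeline
        });
    pb.start(output);               // processed frames are delivered into the queue
    pb.invoke(input);               // push one frame through the block
    return output.wait_for_frame(); // block until the processed frame arrives
}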
/**
* Defines the filter workflow; inherit from this class to create your own filter. Refer to the viewer class in examples.hpp for a more detailed example.
*/
class filter : public processing_block, public filter_interface
{
public:
/**
* Ask the processing block to process the frame and poll the processed frame from the internal queue
*
* \param[in] frame frame to be processed.
* \return processed frame
*/
rs2::frame process(rs2::frame frame) const override
{
invoke(frame);
rs2::frame f;
if (!_queue.poll_for_frame(&f))
throw std::runtime_error("Error occured during execution of the processing block! See the log for more info");
return f;
}
/**
* Constructor from an already-created low-level processing block.
*
* \param[in] block - low-level rs2_processing_block created beforehand.
*/
filter(std::shared_ptr<rs2_processing_block> block, int queue_size = 1)
: processing_block(block),
_queue(queue_size)
{
start(_queue);
}
/**
* Constructor from a processing function.
*
* \param[in] processing_function - callable implementing on_frame of the rs2_frame_processor_callback structure; it is called back by the invoke function.
*/
template<class S>
filter(S processing_function, int queue_size = 1) :
processing_block(processing_function),
_queue(queue_size)
{
start(_queue);
}
frame_queue get_queue() { return _queue; }
rs2_processing_block* get() const { return _block.get(); }
template<class T>
bool is() const
{
T extension(*this);
return extension;
}
template<class T>
T as() const
{
T extension(*this);
return extension;
}
operator bool() const { return _block.get() != nullptr; }
protected:
frame_queue _queue;
};
/**
* Generates 3D point clouds based on a depth frame. Can also map textures from a color frame.
*/
class pointcloud : public filter
{
public:
/**
* create pointcloud instance
*/
pointcloud() : filter(init(), 1) {}
pointcloud(rs2_stream stream, int index = 0) : filter(init(), 1)
{
set_option(RS2_OPTION_STREAM_FILTER, float(stream));
set_option(RS2_OPTION_STREAM_INDEX_FILTER, float(index));
}
/**
* Generate the pointcloud and texture mapping of the depth map.
*
* \param[in] depth - the depth frame from which to generate the point cloud and texture mapping.
* \return points instance.
*/
points calculate(frame depth) const
{
auto res = process(depth);
if (res.as<points>())
return res;
if (auto set = res.as <frameset>())
{
for (auto f : set)
{
if(f.as<points>())
return f;
}
}
throw std::runtime_error("Error occurred during execution of the processing block! See the log for more info");
}
/**
* Map the point cloud to the given color frame.
*
* \param[in] mapped - the frame to use as the texture source.
*/
void map_to(frame mapped)
{
set_option(RS2_OPTION_STREAM_FILTER, float(mapped.get_profile().stream_type()));
set_option(RS2_OPTION_STREAM_FORMAT_FILTER, float(mapped.get_profile().format()));
set_option(RS2_OPTION_STREAM_INDEX_FILTER, float(mapped.get_profile().stream_index()));
process(mapped);
}
protected:
pointcloud(std::shared_ptr<rs2_processing_block> block) : filter(block, 1) {}
private:
friend class context;
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_pointcloud(&e),
rs2_delete_processing_block);
error::handle(e);
// Redirect options API to the processing block
//options::operator=(pb);
return block;
}
};
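// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch, assuming depth and color frames were already obtained (for example from
// rs2::pipeline::wait_for_frames()): map_to() selects the texture stream and calculate()
// produces the 3D vertices with their texture coordinates. The helper name is hypothetical.
inline points example_pointcloud_usage(frame depth, frame color)
{
    pointcloud pc;
    pc.map_to(color);           // texture-map the vertices onto the color stream
    return pc.calculate(depth); // generate the point cloud from the depth frame
}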
class yuy_decoder : public filter
{
public:
/**
* Creates a YUY decoder processing block. This block accepts raw YUY frames and outputs frames in other formats.
* YUY is a common video format used by a variety of webcams. It benefits from packing pixels into 2 bytes per pixel
* without a significant quality drop. The YUY representation can be converted back to the more usable RGB form,
* but this requires a somewhat costly conversion.
* The SDK will automatically try to use SSE2 and AVX instructions, and CUDA where available, to get
* the best performance. Other implementations (using GLSL, OpenCL, Neon and NCS) should follow.
*/
yuy_decoder() : filter(init(), 1) { }
protected:
yuy_decoder(std::shared_ptr<rs2_processing_block> block) : filter(block, 1) {}
private:
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_yuy_decoder(&e),
rs2_delete_processing_block);
error::handle(e);
return block;
}
};
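// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch, assuming raw_yuy is a frame in a YUY (YUYV/YUY2) pixel format: the decoder
// converts it into a renderable format, per the block documentation above. The exact output
// format depends on the block's configuration (assumption); the helper name is hypothetical.
inline video_frame example_yuy_decoder_usage(frame raw_yuy)
{
    yuy_decoder decoder;
    return decoder.process(raw_yuy); // returns the decoded frame
}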
class y411_decoder : public filter
{
public:
/**
* Creates y411 decoder processing block. This block accepts raw y411 frames and outputs frames in RGB8.
* https://www.fourcc.org/pixel-format/yuv-y411/
* Y411 is disguised as NV12 to allow Linux compatibility. Both are 12bpp encodings that allow high-resolution
* modes in the camera to still fit within the USB3 limits (YUY wasn't enough).
*/
y411_decoder() : filter(init()) { }
protected:
y411_decoder(std::shared_ptr<rs2_processing_block> block) : filter(block) {}
private:
static std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_y411_decoder(&e),
rs2_delete_processing_block);
error::handle(e);
return block;
}
};
class threshold_filter : public filter
{
public:
/**
* Creates depth thresholding filter
* By controlling min and max options on the block, one could filter out depth values
* that are either too large or too small, as a software post-processing step
*/
threshold_filter(float min_dist = 0.15f, float max_dist = 4.f)
: filter(init(), 1)
{
set_option(RS2_OPTION_MIN_DISTANCE, min_dist);
set_option(RS2_OPTION_MAX_DISTANCE, max_dist);
}
threshold_filter(filter f) : filter(f)
{
rs2_error* e = nullptr;
if (!rs2_is_processing_block_extendable_to(f.get(), RS2_EXTENSION_THRESHOLD_FILTER, &e) && !e)
{
_block.reset();
}
error::handle(e);
}
protected:
threshold_filter(std::shared_ptr<rs2_processing_block> block) : filter(block, 1) {}
private:
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_threshold(&e),
rs2_delete_processing_block);
error::handle(e);
return block;
}
};
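// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch: clamp the usable depth range to [0.3m, 2.0m] (the values are illustrative)
// before passing the frame to downstream processing. The helper name is hypothetical.
inline frame example_threshold_usage(frame depth)
{
    threshold_filter thresh(0.3f, 2.0f); // min distance, max distance (meters)
    return thresh.process(depth);        // depth values outside the range are invalidated
}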
class units_transform : public filter
{
public:
/**
* Creates depth units to meters processing block.
*/
units_transform() : filter(init(), 1) {}
protected:
units_transform(std::shared_ptr<rs2_processing_block> block) : filter(block, 1) {}
private:
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_units_transform(&e),
rs2_delete_processing_block);
error::handle(e);
return block;
}
};
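// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch: convert a raw depth frame (16-bit depth units) into metric depth in meters,
// as described by the block's documentation above. The helper name is hypothetical.
inline frame example_units_transform_usage(frame depth)
{
    units_transform to_meters;
    return to_meters.process(depth); // output frame holds per-pixel distance in meters
}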
class asynchronous_syncer : public processing_block
{
public:
/**
* The underlying asynchronous syncer used internally by the syncer class
*/
asynchronous_syncer() : processing_block(init()) {}
private:
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_sync_processing_block(&e),
rs2_delete_processing_block);
error::handle(e);
return block;
}
};
class syncer
{
public:
/**
* Sync instance to align frames from different streams
*/
syncer(int queue_size = 1)
:_results(queue_size)
{
_sync.start(_results);
}
/**
* Wait until a coherent set of frames becomes available
* \param[in] timeout_ms Max time in milliseconds to wait before an exception is thrown
* \return Set of coherent frames
*/
frameset wait_for_frames(unsigned int timeout_ms = 5000) const
{
return frameset(_results.wait_for_frame(timeout_ms));
}
/**
* Check if a coherent set of frames is available
* \param[out] fs New coherent frame-set
* \return true if a new frame-set was stored in fs
*/
bool poll_for_frames(frameset* fs) const
{
frame result;
if (_results.poll_for_frame(&result))
{
*fs = frameset(result);
return true;
}
return false;
}
/**
* Wait until a coherent set of frames becomes available
* \param[in] timeout_ms Max time in milliseconds to wait for an available frame-set
* \param[out] fs New coherent frame-set
* \return true if a new frame-set was stored in fs
*/
bool try_wait_for_frames(frameset* fs, unsigned int timeout_ms = 5000) const
{
frame result;
if (_results.try_wait_for_frame(&result, timeout_ms))
{
*fs = frameset(result);
return true;
}
return false;
}
void operator()(frame f) const
{
_sync.invoke(std::move(f));
}
private:
asynchronous_syncer _sync;
frame_queue _results;
};
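// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch: frames are usually fed to the syncer from sensor callbacks (for example by
// passing the syncer to rs2::sensor::start); here two frames are pushed in manually just to
// show the flow. The helper name is hypothetical.
inline void example_syncer_usage(frame depth, frame color)
{
    syncer sync;
    sync(depth);                            // feed frames as they arrive
    sync(color);
    frameset fs;
    if (sync.try_wait_for_frames(&fs, 100)) // wait up to 100 ms for a coherent set
    {
        // fs now holds a time-coherent set of frames
    }
}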
/**
Auxiliary processing block that performs image alignment using depth data and camera calibration
*/
class align : public filter
{
public:
/**
Create align filter
Alignment is performed between a depth image and another image.
To perform alignment of a depth image to the other, set the align_to parameter with the other stream type.
To perform alignment of a non depth image to a depth image, set the align_to parameter to RS2_STREAM_DEPTH.
Camera calibration and frame's stream type are determined on the fly, according to the first valid frameset passed to process().
* \param[in] align_to The stream type to which alignment should be made.
*/
align(rs2_stream align_to) : filter(init(align_to), 1) {}
using filter::process;
/**
* Run the alignment process on the given frames to get an aligned set of frames
*
* \param[in] frames A set of frames, at least one of which is a depth frame
* \return Input frames aligned to one another
*/
frameset process(frameset frames)
{
return filter::process(frames);
}
protected:
align(std::shared_ptr<rs2_processing_block> block) : filter(block, 1) {}
private:
friend class context;
std::shared_ptr<rs2_processing_block> init(rs2_stream align_to)
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_align(align_to, &e),
rs2_delete_processing_block);
error::handle(e);
return block;
}
};
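// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch, assuming frames is a frameset that contains both a depth frame and a color
// frame: every stream in the set is re-projected into the color camera's viewport. The helper
// name is hypothetical.
inline frameset example_align_usage(frameset frames)
{
    align align_to_color(RS2_STREAM_COLOR); // align everything to the color stream
    return align_to_color.process(frames);  // returns a frameset with the depth frame aligned
}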
class colorizer : public filter
{
public:
/**
* Create colorizer filter
* The colorizer generates a color image based on the input depth frame
*/
colorizer() : filter(init(), 1) { }
/**
* Create colorizer processing block
* The colorizer generates a color image based on the input depth frame
* \param[in] color_scheme - select one of the available color schemes:
* 0 - Jet
* 1 - Classic
* 2 - WhiteToBlack
* 3 - BlackToWhite
* 4 - Bio
* 5 - Cold
* 6 - Warm
* 7 - Quantized
* 8 - Pattern
* 9 - Hue
*/
colorizer(float color_scheme) : filter(init(), 1)
{
set_option(RS2_OPTION_COLOR_SCHEME, float(color_scheme));
}
/**
* Generate a color image based on the depth frame
* \param[in] depth - depth frame to be processed to generate the color image
* \return video_frame - generated color image
*/
video_frame colorize(frame depth) const
{
return process(depth);
}
protected:
colorizer(std::shared_ptr<rs2_processing_block> block) : filter(block, 1) {}
private:
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_colorizer(&e),
rs2_delete_processing_block);
error::handle(e);
// Redirect options API to the processing block
//options::operator=(pb);
return block;
}
};
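// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch: convert a depth frame into a false-color visualization using one of the
// color schemes listed above (2 = WhiteToBlack here, chosen arbitrarily). The helper name is
// hypothetical.
inline video_frame example_colorizer_usage(frame depth)
{
    colorizer color_map(2.f);         // pick the WhiteToBlack color scheme
    return color_map.colorize(depth); // returns the colorized image
}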
class decimation_filter : public filter
{
public:
/**
* Create decimation filter
* Decimation filter performs downsampling by using the median with a specific kernel size
*/
decimation_filter() : filter(init(), 1) {}
/**
* Create decimation filter
* Decimation filter performs downsampling by using the median with a specific kernel size
* \param[in] magnitude - decimation linear scale factor (controls the downsampling kernel size).
*/
decimation_filter(float magnitude) : filter(init(), 1)
{
set_option(RS2_OPTION_FILTER_MAGNITUDE, magnitude);
}
decimation_filter(filter f) : filter(f)
{
rs2_error* e = nullptr;
if (!rs2_is_processing_block_extendable_to(f.get(), RS2_EXTENSION_DECIMATION_FILTER, &e) && !e)
{
_block.reset();
}
error::handle(e);
}
private:
friend class context;
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_decimation_filter_block(&e),
rs2_delete_processing_block);
error::handle(e);
// Redirect options API to the processing block
//options::operator=(this);
return block;
}
};
class temporal_filter : public filter
{
public:
/**
* Create temporal filter with default settings
* Temporal filter smooths the image by averaging it over multiple frames using alpha and delta settings;
* alpha defines the weight of the current frame, and delta defines the threshold for edge classification and preservation.
* For more information, see temporal-filter.cpp
*/
temporal_filter() : filter(init(), 1) {}
/**
* Create temporal filter with user settings
* Temporal filter smooths the image by averaging it over multiple frames using alpha and delta settings
* \param[in] smooth_alpha - defines the weight of the current frame.
* \param[in] smooth_delta - defines the threshold for edge classification and preservation.
* \param[in] persistence_control - A set of predefined rules (masks) that govern when missing pixels will be replaced with the last valid value so that the data will remain persistent over time:
* 0 - Disabled - Persistency filter is not activated and no hole filling occurs.
* 1 - Valid in 8/8 - Persistency activated if the pixel was valid in 8 out of the last 8 frames
* 2 - Valid in 2/last 3 - Activated if the pixel was valid in two out of the last 3 frames
* 3 - Valid in 2/last 4 - Activated if the pixel was valid in two out of the last 4 frames
* 4 - Valid in 2/8 - Activated if the pixel was valid in two out of the last 8 frames
* 5 - Valid in 1/last 2 - Activated if the pixel was valid in one of the last two frames
* 6 - Valid in 1/last 5 - Activated if the pixel was valid in one out of the last 5 frames
* 7 - Valid in 1/last 8 - Activated if the pixel was valid in one out of the last 8 frames
* 8 - Persist Indefinitely - Persistency will be imposed regardless of the stored history(most aggressive filtering)
* For more information, check temporal-filter.cpp
*/
temporal_filter(float smooth_alpha, float smooth_delta, int persistence_control) : filter(init(), 1)
{
set_option(RS2_OPTION_HOLES_FILL, float(persistence_control));
set_option(RS2_OPTION_FILTER_SMOOTH_ALPHA, float(smooth_alpha));
set_option(RS2_OPTION_FILTER_SMOOTH_DELTA, float(smooth_delta));
}
temporal_filter(filter f) :filter(f)
{
rs2_error* e = nullptr;
if (!rs2_is_processing_block_extendable_to(f.get(), RS2_EXTENSION_TEMPORAL_FILTER, &e) && !e)
{
_block.reset();
}
error::handle(e);
}
private:
friend class context;
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_temporal_filter_block(&e),
rs2_delete_processing_block);
error::handle(e);
// Redirect options API to the processing block
//options::operator=(pb);
return block;
}
};
class spatial_filter : public filter
{
public:
/**
* Create spatial filter
* Spatial filter smooths the image using alpha and delta settings;
* alpha defines the weight of the current pixel for smoothing and is bounded within [25..100]%,
* delta defines the depth gradient below which smoothing will occur, expressed as a number of depth levels.
* For more information, see spatial-filter.cpp
*/
spatial_filter() : filter(init(), 1) { }
/**
* Create spatial filter
* Spatial filter smooths the image using alpha and delta settings
* \param[in] smooth_alpha - defines the weight of the current pixel for smoothing; bounded within [25..100]%
* \param[in] smooth_delta - defines the depth gradient below which smoothing will occur, expressed as a number of depth levels
* \param[in] magnitude - number of filter iterations.
* \param[in] hole_fill - an in-place heuristic symmetric hole-filling mode applied horizontally during the filter passes.
* Intended to rectify minor artefacts with minimal performance impact
*/
spatial_filter(float smooth_alpha, float smooth_delta, float magnitude, float hole_fill) : filter(init(), 1)
{
set_option(RS2_OPTION_FILTER_SMOOTH_ALPHA, float(smooth_alpha));
set_option(RS2_OPTION_FILTER_SMOOTH_DELTA, float(smooth_delta));
set_option(RS2_OPTION_FILTER_MAGNITUDE, magnitude);
set_option(RS2_OPTION_HOLES_FILL, hole_fill);
}
spatial_filter(filter f) :filter(f)
{
rs2_error* e = nullptr;
if (!rs2_is_processing_block_extendable_to(f.get(), RS2_EXTENSION_SPATIAL_FILTER, &e) && !e)
{
_block.reset();
}
error::handle(e);
}
private:
friend class context;
std::shared_ptr<rs2_processing_block> init()
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_spatial_filter_block(&e),
rs2_delete_processing_block);
error::handle(e);
// Redirect options API to the processing block
//options::operator=(pb);
return block;
}
};
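// The sketch below is an illustrative usage example, not part of the original header.
// A minimal sketch of a typical depth post-processing chain: decimation, then spatial and
// temporal smoothing. In many applications the spatial/temporal pair is also wrapped in a
// depth<->disparity round trip using the disparity_transform block defined below. The helper
// name and parameter values are illustrative only.
inline frame example_post_processing_chain(frame depth)
{
    decimation_filter dec(2.f); // downsample the depth frame
    spatial_filter spat;        // edge-preserving spatial smoothing
    temporal_filter temp;       // smoothing over time

    frame out = dec.process(depth);
    out = spat.process(out);
    out = temp.process(out);
    return out;
}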
class disparity_transform : public filter
{
public:
/**
* Create disparity transform filter
* Converts from depth representation to disparity representation and vice-versa in depth frames
*/
disparity_transform(bool transform_to_disparity = true) : filter(init(transform_to_disparity), 1) { }
disparity_transform(filter f) :filter(f)
{
rs2_error* e = nullptr;
if (!rs2_is_processing_block_extendable_to(f.get(), RS2_EXTENSION_DISPARITY_FILTER, &e) && !e)
{
_block.reset();
}
error::handle(e);
}
private:
friend class context;
std::shared_ptr<rs2_processing_block> init(bool transform_to_disparity)
{
rs2_error* e = nullptr;
auto block = std::shared_ptr<rs2_processing_block>(
rs2_create_disparity_transform_block(uint8_t(transform_to_disparity), &e),
rs2_delete_processing_block);
error::handle(e);
// Redirect options API to the processing block
//options::operator=(pb);