OpenCL C++ Bindings
cl2.hpp
1 /*******************************************************************************
2  * Copyright (c) 2008-2016 The Khronos Group Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and/or associated documentation files (the
6  * "Materials"), to deal in the Materials without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sublicense, and/or sell copies of the Materials, and to
9  * permit persons to whom the Materials are furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included
13  * in all copies or substantial portions of the Materials.
14  *
15  * MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS
16  * KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS
17  * SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT
18  * https://www.khronos.org/registry/
19  *
20  * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
21  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
22  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
23  * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
24  * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
25  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
26  * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
27  ******************************************************************************/
28 
393 #ifndef CL_HPP_
394 #define CL_HPP_
395 
396 /* Handle deprecated preprocessor definitions. In each case, we only check for
397  * the old name if the new name is not defined, so that user code can define
398  * both and hence work with either version of the bindings.
399  */
400 #if !defined(CL_HPP_USE_DX_INTEROP) && defined(USE_DX_INTEROP)
401 # pragma message("cl2.hpp: USE_DX_INTEROP is deprecated. Define CL_HPP_USE_DX_INTEROP instead")
402 # define CL_HPP_USE_DX_INTEROP
403 #endif
404 #if !defined(CL_HPP_USE_CL_DEVICE_FISSION) && defined(USE_CL_DEVICE_FISSION)
405 # pragma message("cl2.hpp: USE_CL_DEVICE_FISSION is deprecated. Define CL_HPP_USE_CL_DEVICE_FISSION instead")
406 # define CL_HPP_USE_CL_DEVICE_FISSION
407 #endif
408 #if !defined(CL_HPP_ENABLE_EXCEPTIONS) && defined(__CL_ENABLE_EXCEPTIONS)
409 # pragma message("cl2.hpp: __CL_ENABLE_EXCEPTIONS is deprecated. Define CL_HPP_ENABLE_EXCEPTIONS instead")
410 # define CL_HPP_ENABLE_EXCEPTIONS
411 #endif
412 #if !defined(CL_HPP_NO_STD_VECTOR) && defined(__NO_STD_VECTOR)
413 # pragma message("cl2.hpp: __NO_STD_VECTOR is deprecated. Define CL_HPP_NO_STD_VECTOR instead")
414 # define CL_HPP_NO_STD_VECTOR
415 #endif
416 #if !defined(CL_HPP_NO_STD_STRING) && defined(__NO_STD_STRING)
417 # pragma message("cl2.hpp: __NO_STD_STRING is deprecated. Define CL_HPP_NO_STD_STRING instead")
418 # define CL_HPP_NO_STD_STRING
419 #endif
420 #if defined(VECTOR_CLASS)
421 # pragma message("cl2.hpp: VECTOR_CLASS is deprecated. Alias cl::vector instead")
422 #endif
423 #if defined(STRING_CLASS)
424 # pragma message("cl2.hpp: STRING_CLASS is deprecated. Alias cl::string instead.")
425 #endif
426 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS) && defined(__CL_USER_OVERRIDE_ERROR_STRINGS)
427 # pragma message("cl2.hpp: __CL_USER_OVERRIDE_ERROR_STRINGS is deprecated. Define CL_HPP_USER_OVERRIDE_ERROR_STRINGS instead")
428 # define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
429 #endif
430 
431 /* Warn about features that are no longer supported
432  */
433 #if defined(__USE_DEV_VECTOR)
434 # pragma message("cl2.hpp: __USE_DEV_VECTOR is no longer supported. Expect compilation errors")
435 #endif
436 #if defined(__USE_DEV_STRING)
437 # pragma message("cl2.hpp: __USE_DEV_STRING is no longer supported. Expect compilation errors")
438 #endif
439 
440 /* Detect which version to target */
441 #if !defined(CL_HPP_TARGET_OPENCL_VERSION)
442 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not defined. It will default to 200 (OpenCL 2.0)")
443 # define CL_HPP_TARGET_OPENCL_VERSION 200
444 #endif
445 #if CL_HPP_TARGET_OPENCL_VERSION != 100 && CL_HPP_TARGET_OPENCL_VERSION != 110 && CL_HPP_TARGET_OPENCL_VERSION != 120 && CL_HPP_TARGET_OPENCL_VERSION != 200
446 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120 or 200). It will be set to 200")
447 # undef CL_HPP_TARGET_OPENCL_VERSION
448 # define CL_HPP_TARGET_OPENCL_VERSION 200
449 #endif
450 
451 /* Forward target OpenCL version to C headers if necessary */
452 #if defined(CL_TARGET_OPENCL_VERSION)
453 /* Warn if prior definition of CL_TARGET_OPENCL_VERSION is lower than
454  * requested C++ bindings version */
455 #if CL_TARGET_OPENCL_VERSION < CL_HPP_TARGET_OPENCL_VERSION
456 # pragma message("CL_TARGET_OPENCL_VERSION is already defined and is lower than CL_HPP_TARGET_OPENCL_VERSION")
457 #endif
458 #else
459 # define CL_TARGET_OPENCL_VERSION CL_HPP_TARGET_OPENCL_VERSION
460 #endif
461 
462 #if !defined(CL_HPP_MINIMUM_OPENCL_VERSION)
463 # define CL_HPP_MINIMUM_OPENCL_VERSION 200
464 #endif
465 #if CL_HPP_MINIMUM_OPENCL_VERSION != 100 && CL_HPP_MINIMUM_OPENCL_VERSION != 110 && CL_HPP_MINIMUM_OPENCL_VERSION != 120 && CL_HPP_MINIMUM_OPENCL_VERSION != 200
466 # pragma message("cl2.hpp: CL_HPP_MINIMUM_OPENCL_VERSION is not a valid value (100, 110, 120 or 200). It will be set to 100")
467 # undef CL_HPP_MINIMUM_OPENCL_VERSION
468 # define CL_HPP_MINIMUM_OPENCL_VERSION 100
469 #endif
470 #if CL_HPP_MINIMUM_OPENCL_VERSION > CL_HPP_TARGET_OPENCL_VERSION
471 # error "CL_HPP_MINIMUM_OPENCL_VERSION must not be greater than CL_HPP_TARGET_OPENCL_VERSION"
472 #endif
473 
474 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS)
475 # define CL_USE_DEPRECATED_OPENCL_1_0_APIS
476 #endif
477 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
478 # define CL_USE_DEPRECATED_OPENCL_1_1_APIS
479 #endif
480 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
481 # define CL_USE_DEPRECATED_OPENCL_1_2_APIS
482 #endif
483 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS)
484 # define CL_USE_DEPRECATED_OPENCL_2_0_APIS
485 #endif
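/* Editorial sketch (not part of the original header): a translation unit
 * normally pins both version macros before the include so that only entry
 * points available on its platform are referenced, e.g. for OpenCL 1.2:
 *
 *     #define CL_HPP_ENABLE_EXCEPTIONS
 *     #define CL_HPP_MINIMUM_OPENCL_VERSION 120
 *     #define CL_HPP_TARGET_OPENCL_VERSION 120
 *     #include <CL/cl2.hpp>
 *
 * With those settings the block above also defines
 * CL_USE_DEPRECATED_OPENCL_1_2_APIS and CL_USE_DEPRECATED_OPENCL_2_0_APIS,
 * keeping those deprecated C declarations visible. */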
486 
487 #ifdef _WIN32
488 
489 #include <malloc.h>
490 
491 #if defined(CL_HPP_USE_DX_INTEROP)
492 #include <CL/cl_d3d10.h>
493 #include <CL/cl_dx9_media_sharing.h>
494 #endif
495 #endif // _WIN32
496 
497 #if defined(_MSC_VER)
498 #include <intrin.h>
499 #endif // _MSC_VER
500 
501  // Check for a valid C++ version
502 
503 // Need to do both tests here because for some reason __cplusplus is not
504 // updated in Visual Studio
505 #if (!defined(_MSC_VER) && __cplusplus < 201103L) || (defined(_MSC_VER) && _MSC_VER < 1800)
506 #error Visual Studio 2013 or another C++11-supporting compiler required
507 #endif
508 
509 //
510 #if defined(CL_HPP_USE_CL_DEVICE_FISSION) || defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
511 #include <CL/cl_ext.h>
512 #endif
513 
514 #if defined(__APPLE__) || defined(__MACOSX)
515 #include <OpenCL/opencl.h>
516 #else
517 #include <CL/opencl.h>
518 #endif // !__APPLE__
519 
520 #if (__cplusplus >= 201103L)
521 #define CL_HPP_NOEXCEPT_ noexcept
522 #else
523 #define CL_HPP_NOEXCEPT_
524 #endif
525 
526 #if defined(_MSC_VER)
527 # define CL_HPP_DEFINE_STATIC_MEMBER_ __declspec(selectany)
528 #else
529 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((weak))
530 #endif // !_MSC_VER
531 
532 // Define deprecated prefixes and suffixes to ensure compilation
533 // in case they are not pre-defined
534 #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
535 #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
536 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
537 #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
538 #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
539 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
540 
541 #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
542 #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED
543 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
544 #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
545 #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
546 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
547 
548 #if !defined(CL_CALLBACK)
549 #define CL_CALLBACK
550 #endif //CL_CALLBACK
551 
552 #include <utility>
553 #include <limits>
554 #include <iterator>
555 #include <mutex>
556 #include <cstring>
557 #include <functional>
558 
559 
560 // Define a size_type to represent a correctly resolved size_t
561 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
562 namespace cl {
563  using size_type = ::size_t;
564 } // namespace cl
565 #else // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
566 namespace cl {
567  using size_type = size_t;
568 } // namespace cl
569 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
570 
571 
572 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
573 #include <exception>
574 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
575 
576 #if !defined(CL_HPP_NO_STD_VECTOR)
577 #include <vector>
578 namespace cl {
579  template < class T, class Alloc = std::allocator<T> >
580  using vector = std::vector<T, Alloc>;
581 } // namespace cl
582 #endif // #if !defined(CL_HPP_NO_STD_VECTOR)
583 
584 #if !defined(CL_HPP_NO_STD_STRING)
585 #include <string>
586 namespace cl {
587  using string = std::string;
588 } // namespace cl
589 #endif // #if !defined(CL_HPP_NO_STD_STRING)
590 
591 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
592 
593 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
594 #include <memory>
595 namespace cl {
596  // Alias unique_ptr as cl::pointer (used alongside allocate_pointer)
597  // internally, so that the user can replace them
598  template<class T, class D>
599  using pointer = std::unique_ptr<T, D>;
600 } // namespace cl
601 #endif
602 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
603 #if !defined(CL_HPP_NO_STD_ARRAY)
604 #include <array>
605 namespace cl {
606  template < class T, size_type N >
607  using array = std::array<T, N>;
608 } // namespace cl
609 #endif // #if !defined(CL_HPP_NO_STD_ARRAY)
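/* Editorial sketch (hypothetical names): the CL_HPP_NO_STD_* switches above
 * let a project substitute its own containers, provided the replacement
 * aliases exist in namespace cl before this header is included.
 * MyVector/MyAllocator are placeholders for the project's own types:
 *
 *     #define CL_HPP_NO_STD_VECTOR
 *     namespace cl {
 *         template<class T, class Alloc = MyAllocator<T>>
 *         using vector = MyVector<T, Alloc>;
 *     }
 *     #include <CL/cl2.hpp>
 */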
610 
611 // Define size_type appropriately to allow backward-compatibility
612 // use of the old size_t interface class
613 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
614 namespace cl {
615  namespace compatibility {
620  template <int N>
621  class size_t
622  {
623  private:
624  size_type data_[N];
625 
626  public:
628  size_t()
629  {
630  for (int i = 0; i < N; ++i) {
631  data_[i] = 0;
632  }
633  }
634 
635  size_t(const array<size_type, N> &rhs)
636  {
637  for (int i = 0; i < N; ++i) {
638  data_[i] = rhs[i];
639  }
640  }
641 
642  size_type& operator[](int index)
643  {
644  return data_[index];
645  }
646 
647  const size_type& operator[](int index) const
648  {
649  return data_[index];
650  }
651 
653  operator size_type* () { return data_; }
654 
656  operator const size_type* () const { return data_; }
657 
658  operator array<size_type, N>() const
659  {
660  array<size_type, N> ret;
661 
662  for (int i = 0; i < N; ++i) {
663  ret[i] = data_[i];
664  }
665  return ret;
666  }
667  };
668  } // namespace compatibility
669 
670  template<int N>
671  using size_t = compatibility::size_t<N>;
672 } // namespace cl
673 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
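/* Editorial sketch: with CL_HPP_ENABLE_SIZE_T_COMPATIBILITY defined, code
 * written against the old cl.hpp cl::size_t<N> class keeps compiling
 * (width/height below are assumed to be in scope):
 *
 *     cl::size_t<3> region;
 *     region[0] = width;
 *     region[1] = height;
 *     region[2] = 1;
 *     // implicitly convertible both to cl::array<cl::size_type, 3> and to
 *     // a raw size_type* where the C API expects one
 */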
674 
675 // Helper alias to avoid confusing the macros
676 namespace cl {
677  namespace detail {
678  using size_t_array = array<size_type, 3>;
679  } // namespace detail
680 } // namespace cl
681 
682 
688 namespace cl {
689  class Memory;
690 
691 #define CL_HPP_INIT_CL_EXT_FCN_PTR_(name) \
692  if (!pfn_##name) { \
693  pfn_##name = (PFN_##name) \
694  clGetExtensionFunctionAddress(#name); \
695  if (!pfn_##name) { \
696  } \
697  }
698 
699 #define CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, name) \
700  if (!pfn_##name) { \
701  pfn_##name = (PFN_##name) \
702  clGetExtensionFunctionAddressForPlatform(platform, #name); \
703  if (!pfn_##name) { \
704  } \
705  }
706 
707  class Program;
708  class Device;
709  class Context;
710  class CommandQueue;
711  class DeviceCommandQueue;
712  class Memory;
713  class Buffer;
714  class Pipe;
715 
716 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
717 
721  class Error : public std::exception
722  {
723  private:
724  cl_int err_;
725  const char * errStr_;
726  public:
736  Error(cl_int err, const char * errStr = NULL) : err_(err), errStr_(errStr)
737  {}
738 
739  ~Error() throw() {}
740 
745  virtual const char * what() const throw ()
746  {
747  if (errStr_ == NULL) {
748  return "empty";
749  }
750  else {
751  return errStr_;
752  }
753  }
754 
759  cl_int err(void) const { return err_; }
760  };
761 #define CL_HPP_ERR_STR_(x) #x
762 #else
763 #define CL_HPP_ERR_STR_(x) NULL
764 #endif // CL_HPP_ENABLE_EXCEPTIONS
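/* Editorial sketch: with CL_HPP_ENABLE_EXCEPTIONS defined, failing calls
 * throw cl::Error instead of returning an error code:
 *
 *     try {
 *         cl::Context context(CL_DEVICE_TYPE_GPU);
 *     } catch (const cl::Error &e) {
 *         std::cerr << e.what() << " (" << e.err() << ")\n";  // needs <iostream>
 *     }
 */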
765 
766 
767 namespace detail
768 {
769 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
770 static inline cl_int errHandler (
771  cl_int err,
772  const char * errStr = NULL)
773 {
774  if (err != CL_SUCCESS) {
775  throw Error(err, errStr);
776  }
777  return err;
778 }
779 #else
780 static inline cl_int errHandler (cl_int err, const char * errStr = NULL)
781 {
782  (void) errStr; // suppress unused variable warning
783  return err;
784 }
785 #endif // CL_HPP_ENABLE_EXCEPTIONS
786 }
787 
788 
789 
791 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
792 #define __GET_DEVICE_INFO_ERR CL_HPP_ERR_STR_(clGetDeviceInfo)
793 #define __GET_PLATFORM_INFO_ERR CL_HPP_ERR_STR_(clGetPlatformInfo)
794 #define __GET_DEVICE_IDS_ERR CL_HPP_ERR_STR_(clGetDeviceIDs)
795 #define __GET_PLATFORM_IDS_ERR CL_HPP_ERR_STR_(clGetPlatformIDs)
796 #define __GET_CONTEXT_INFO_ERR CL_HPP_ERR_STR_(clGetContextInfo)
797 #define __GET_EVENT_INFO_ERR CL_HPP_ERR_STR_(clGetEventInfo)
798 #define __GET_EVENT_PROFILE_INFO_ERR CL_HPP_ERR_STR_(clGetEventProfilingInfo)
799 #define __GET_MEM_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetMemObjectInfo)
800 #define __GET_IMAGE_INFO_ERR CL_HPP_ERR_STR_(clGetImageInfo)
801 #define __GET_SAMPLER_INFO_ERR CL_HPP_ERR_STR_(clGetSamplerInfo)
802 #define __GET_KERNEL_INFO_ERR CL_HPP_ERR_STR_(clGetKernelInfo)
803 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
804 #define __GET_KERNEL_ARG_INFO_ERR CL_HPP_ERR_STR_(clGetKernelArgInfo)
805 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
806 #define __GET_KERNEL_WORK_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelWorkGroupInfo)
807 #define __GET_PROGRAM_INFO_ERR CL_HPP_ERR_STR_(clGetProgramInfo)
808 #define __GET_PROGRAM_BUILD_INFO_ERR CL_HPP_ERR_STR_(clGetProgramBuildInfo)
809 #define __GET_COMMAND_QUEUE_INFO_ERR CL_HPP_ERR_STR_(clGetCommandQueueInfo)
810 
811 #define __CREATE_CONTEXT_ERR CL_HPP_ERR_STR_(clCreateContext)
812 #define __CREATE_CONTEXT_FROM_TYPE_ERR CL_HPP_ERR_STR_(clCreateContextFromType)
813 #define __GET_SUPPORTED_IMAGE_FORMATS_ERR CL_HPP_ERR_STR_(clGetSupportedImageFormats)
814 
815 #define __CREATE_BUFFER_ERR CL_HPP_ERR_STR_(clCreateBuffer)
816 #define __COPY_ERR CL_HPP_ERR_STR_(cl::copy)
817 #define __CREATE_SUBBUFFER_ERR CL_HPP_ERR_STR_(clCreateSubBuffer)
818 #define __CREATE_GL_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
819 #define __CREATE_GL_RENDER_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLRenderbuffer)
820 #define __GET_GL_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetGLObjectInfo)
821 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
822 #define __CREATE_IMAGE_ERR CL_HPP_ERR_STR_(clCreateImage)
823 #define __CREATE_GL_TEXTURE_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture)
824 #define __IMAGE_DIMENSION_ERR CL_HPP_ERR_STR_(Incorrect image dimensions)
825 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
826 #define __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR CL_HPP_ERR_STR_(clSetMemObjectDestructorCallback)
827 
828 #define __CREATE_USER_EVENT_ERR CL_HPP_ERR_STR_(clCreateUserEvent)
829 #define __SET_USER_EVENT_STATUS_ERR CL_HPP_ERR_STR_(clSetUserEventStatus)
830 #define __SET_EVENT_CALLBACK_ERR CL_HPP_ERR_STR_(clSetEventCallback)
831 #define __WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clWaitForEvents)
832 
833 #define __CREATE_KERNEL_ERR CL_HPP_ERR_STR_(clCreateKernel)
834 #define __SET_KERNEL_ARGS_ERR CL_HPP_ERR_STR_(clSetKernelArg)
835 #define __CREATE_PROGRAM_WITH_SOURCE_ERR CL_HPP_ERR_STR_(clCreateProgramWithSource)
836 #define __CREATE_PROGRAM_WITH_BINARY_ERR CL_HPP_ERR_STR_(clCreateProgramWithBinary)
837 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
838 #define __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR CL_HPP_ERR_STR_(clCreateProgramWithBuiltInKernels)
839 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
840 #define __BUILD_PROGRAM_ERR CL_HPP_ERR_STR_(clBuildProgram)
841 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
842 #define __COMPILE_PROGRAM_ERR CL_HPP_ERR_STR_(clCompileProgram)
843 #define __LINK_PROGRAM_ERR CL_HPP_ERR_STR_(clLinkProgram)
844 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
845 #define __CREATE_KERNELS_IN_PROGRAM_ERR CL_HPP_ERR_STR_(clCreateKernelsInProgram)
846 
847 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
848 #define __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateCommandQueueWithProperties)
849 #define __CREATE_SAMPLER_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateSamplerWithProperties)
850 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
851 #define __SET_COMMAND_QUEUE_PROPERTY_ERR CL_HPP_ERR_STR_(clSetCommandQueueProperty)
852 #define __ENQUEUE_READ_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueReadBuffer)
853 #define __ENQUEUE_READ_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueReadBufferRect)
854 #define __ENQUEUE_WRITE_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueWriteBuffer)
855 #define __ENQUEUE_WRITE_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueWriteBufferRect)
856 #define __ENQEUE_COPY_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyBuffer)
857 #define __ENQEUE_COPY_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferRect)
858 #define __ENQUEUE_FILL_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueFillBuffer)
859 #define __ENQUEUE_READ_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueReadImage)
860 #define __ENQUEUE_WRITE_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueWriteImage)
861 #define __ENQUEUE_COPY_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyImage)
862 #define __ENQUEUE_FILL_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueFillImage)
863 #define __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyImageToBuffer)
864 #define __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferToImage)
865 #define __ENQUEUE_MAP_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueMapBuffer)
866 #define __ENQUEUE_MAP_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueMapImage)
867 #define __ENQUEUE_UNMAP_MEM_OBJECT_ERR CL_HPP_ERR_STR_(clEnqueueUnmapMemObject)
868 #define __ENQUEUE_NDRANGE_KERNEL_ERR CL_HPP_ERR_STR_(clEnqueueNDRangeKernel)
869 #define __ENQUEUE_NATIVE_KERNEL CL_HPP_ERR_STR_(clEnqueueNativeKernel)
870 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
871 #define __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR CL_HPP_ERR_STR_(clEnqueueMigrateMemObjects)
872 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
873 
874 #define __ENQUEUE_ACQUIRE_GL_ERR CL_HPP_ERR_STR_(clEnqueueAcquireGLObjects)
875 #define __ENQUEUE_RELEASE_GL_ERR CL_HPP_ERR_STR_(clEnqueueReleaseGLObjects)
876 
877 #define __CREATE_PIPE_ERR CL_HPP_ERR_STR_(clCreatePipe)
878 #define __GET_PIPE_INFO_ERR CL_HPP_ERR_STR_(clGetPipeInfo)
879 
880 
881 #define __RETAIN_ERR CL_HPP_ERR_STR_(Retain Object)
882 #define __RELEASE_ERR CL_HPP_ERR_STR_(Release Object)
883 #define __FLUSH_ERR CL_HPP_ERR_STR_(clFlush)
884 #define __FINISH_ERR CL_HPP_ERR_STR_(clFinish)
885 #define __VECTOR_CAPACITY_ERR CL_HPP_ERR_STR_(Vector capacity error)
886 
890 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
891 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevices)
892 #else
893 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevicesEXT)
894 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
895 
899 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
900 #define __ENQUEUE_MARKER_ERR CL_HPP_ERR_STR_(clEnqueueMarker)
901 #define __ENQUEUE_WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clEnqueueWaitForEvents)
902 #define __ENQUEUE_BARRIER_ERR CL_HPP_ERR_STR_(clEnqueueBarrier)
903 #define __UNLOAD_COMPILER_ERR CL_HPP_ERR_STR_(clUnloadCompiler)
904 #define __CREATE_GL_TEXTURE_2D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture2D)
905 #define __CREATE_GL_TEXTURE_3D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture3D)
906 #define __CREATE_IMAGE2D_ERR CL_HPP_ERR_STR_(clCreateImage2D)
907 #define __CREATE_IMAGE3D_ERR CL_HPP_ERR_STR_(clCreateImage3D)
908 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
909 
913 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
914 #define __CREATE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clCreateCommandQueue)
915 #define __ENQUEUE_TASK_ERR CL_HPP_ERR_STR_(clEnqueueTask)
916 #define __CREATE_SAMPLER_ERR CL_HPP_ERR_STR_(clCreateSampler)
917 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
918 
922 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
923 #define __ENQUEUE_MARKER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueMarkerWithWaitList)
924 #define __ENQUEUE_BARRIER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueBarrierWithWaitList)
925 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
926 
927 #endif // CL_HPP_USER_OVERRIDE_ERROR_STRINGS
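/* Editorial sketch: defining CL_HPP_USER_OVERRIDE_ERROR_STRINGS skips the
 * whole block above, making the application responsible for providing every
 * __*_ERR macro used by the code paths it instantiates, e.g.:
 *
 *     #define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
 *     #define __BUILD_PROGRAM_ERR "myapp: clBuildProgram failed"
 *     // ... remaining __*_ERR definitions ...
 *     #include <CL/cl2.hpp>
 */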
928 
930 
931 namespace detail {
932 
933 // Generic getInfoHelper. The final parameter is used to guide overload
934 // resolution: callers pass an int (0), which needs a conversion to match
935 // the long declared here, so any specialization that declares that
936 // parameter as int is a better match and wins when it applies.
937 template<typename Functor, typename T>
938 inline cl_int getInfoHelper(Functor f, cl_uint name, T* param, long)
939 {
940  return f(name, sizeof(T), param, NULL);
941 }
942 
943 // Specialized for getInfo<CL_PROGRAM_BINARIES>
944 // Assumes that the output vector was correctly resized on the way in
945 template <typename Func>
946 inline cl_int getInfoHelper(Func f, cl_uint name, vector<vector<unsigned char>>* param, int)
947 {
948  if (name != CL_PROGRAM_BINARIES) {
949  return CL_INVALID_VALUE;
950  }
951  if (param) {
952  // Create array of pointers, calculate total size and pass pointer array in
953  size_type numBinaries = param->size();
954  vector<unsigned char*> binariesPointers(numBinaries);
955 
956  for (size_type i = 0; i < numBinaries; ++i)
957  {
958  binariesPointers[i] = (*param)[i].data();
959  }
960 
961  cl_int err = f(name, numBinaries * sizeof(unsigned char*), binariesPointers.data(), NULL);
962 
963  if (err != CL_SUCCESS) {
964  return err;
965  }
966  }
967 
968 
969  return CL_SUCCESS;
970 }
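/* Editorial sketch: this specialization backs the typed
 * Program::getInfo<CL_PROGRAM_BINARIES>() query; the bindings are expected
 * to size the outer and inner vectors from CL_PROGRAM_BINARY_SIZES before
 * calling in, so with the default std-based cl::vector a caller can write:
 *
 *     cl::Program program = ...;  // assumed to be built already
 *     cl::vector<cl::vector<unsigned char>> binaries =
 *         program.getInfo<CL_PROGRAM_BINARIES>();
 */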
971 
972 // Specialized getInfoHelper for vector params
973 template <typename Func, typename T>
974 inline cl_int getInfoHelper(Func f, cl_uint name, vector<T>* param, long)
975 {
976  size_type required;
977  cl_int err = f(name, 0, NULL, &required);
978  if (err != CL_SUCCESS) {
979  return err;
980  }
981  const size_type elements = required / sizeof(T);
982 
983  // Temporary to avoid changing param on an error
984  vector<T> localData(elements);
985  err = f(name, required, localData.data(), NULL);
986  if (err != CL_SUCCESS) {
987  return err;
988  }
989  if (param) {
990  *param = std::move(localData);
991  }
992 
993  return CL_SUCCESS;
994 }
995 
996 /* Specialization for reference-counted types. This depends on the
997  * existence of Wrapper<T>::cl_type, and none of the other types having the
998  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
999  * does not work, because when using a derived type (e.g. Context) the generic
1000  * template will provide a better match.
1001  */
1002 template <typename Func, typename T>
1003 inline cl_int getInfoHelper(
1004  Func f, cl_uint name, vector<T>* param, int, typename T::cl_type = 0)
1005 {
1006  size_type required;
1007  cl_int err = f(name, 0, NULL, &required);
1008  if (err != CL_SUCCESS) {
1009  return err;
1010  }
1011 
1012  const size_type elements = required / sizeof(typename T::cl_type);
1013 
1014  vector<typename T::cl_type> value(elements);
1015  err = f(name, required, value.data(), NULL);
1016  if (err != CL_SUCCESS) {
1017  return err;
1018  }
1019 
1020  if (param) {
1021  // Assign to convert CL type to T for each element
1022  param->resize(elements);
1023 
1024  // Assign to param, constructing with retain behaviour
1025  // to correctly capture each underlying CL object
1026  for (size_type i = 0; i < elements; i++) {
1027  (*param)[i] = T(value[i], true);
1028  }
1029  }
1030  return CL_SUCCESS;
1031 }
1032 
1033 // Specialized GetInfoHelper for string params
1034 template <typename Func>
1035 inline cl_int getInfoHelper(Func f, cl_uint name, string* param, long)
1036 {
1037  size_type required;
1038  cl_int err = f(name, 0, NULL, &required);
1039  if (err != CL_SUCCESS) {
1040  return err;
1041  }
1042 
1043  // std::string exposes only const data(), so read into a char vector
1044  // first and then copy into the string, dropping the trailing NUL
1045  if (required > 0) {
1046  vector<char> value(required);
1047  err = f(name, required, value.data(), NULL);
1048  if (err != CL_SUCCESS) {
1049  return err;
1050  }
1051  if (param) {
1052  param->assign(begin(value), prev(end(value)));
1053  }
1054  }
1055  else if (param) {
1056  param->assign("");
1057  }
1058  return CL_SUCCESS;
1059 }
1060 
1061 // Specialized getInfoHelper for array<size_type, N> params
1062 template <typename Func, size_type N>
1063 inline cl_int getInfoHelper(Func f, cl_uint name, array<size_type, N>* param, long)
1064 {
1065  size_type required;
1066  cl_int err = f(name, 0, NULL, &required);
1067  if (err != CL_SUCCESS) {
1068  return err;
1069  }
1070 
1071  size_type elements = required / sizeof(size_type);
1072  vector<size_type> value(elements, 0);
1073 
1074  err = f(name, required, value.data(), NULL);
1075  if (err != CL_SUCCESS) {
1076  return err;
1077  }
1078 
1079  // Bound the copy with N to prevent overruns in case the
1080  // query returned more elements than the destination holds
1081  if (elements > N) {
1082  elements = N;
1083  }
1084  for (size_type i = 0; i < elements; ++i) {
1085  (*param)[i] = value[i];
1086  }
1087 
1088  return CL_SUCCESS;
1089 }
1090 
1091 template<typename T> struct ReferenceHandler;
1092 
1093 /* Specialization for reference-counted types. This depends on the
1094  * existence of Wrapper<T>::cl_type, and none of the other types having the
1095  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
1096  * does not work, because when using a derived type (e.g. Context) the generic
1097  * template will provide a better match.
1098  */
1099 template<typename Func, typename T>
1100 inline cl_int getInfoHelper(Func f, cl_uint name, T* param, int, typename T::cl_type = 0)
1101 {
1102  typename T::cl_type value;
1103  cl_int err = f(name, sizeof(value), &value, NULL);
1104  if (err != CL_SUCCESS) {
1105  return err;
1106  }
1107  *param = value;
1108  if (value != NULL)
1109  {
1110  err = param->retain();
1111  if (err != CL_SUCCESS) {
1112  return err;
1113  }
1114  }
1115  return CL_SUCCESS;
1116 }
1117 
1118 #define CL_HPP_PARAM_NAME_INFO_1_0_(F) \
1119  F(cl_platform_info, CL_PLATFORM_PROFILE, string) \
1120  F(cl_platform_info, CL_PLATFORM_VERSION, string) \
1121  F(cl_platform_info, CL_PLATFORM_NAME, string) \
1122  F(cl_platform_info, CL_PLATFORM_VENDOR, string) \
1123  F(cl_platform_info, CL_PLATFORM_EXTENSIONS, string) \
1124  \
1125  F(cl_device_info, CL_DEVICE_TYPE, cl_device_type) \
1126  F(cl_device_info, CL_DEVICE_VENDOR_ID, cl_uint) \
1127  F(cl_device_info, CL_DEVICE_MAX_COMPUTE_UNITS, cl_uint) \
1128  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS, cl_uint) \
1129  F(cl_device_info, CL_DEVICE_MAX_WORK_GROUP_SIZE, size_type) \
1130  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_SIZES, cl::vector<size_type>) \
1131  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR, cl_uint) \
1132  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT, cl_uint) \
1133  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT, cl_uint) \
1134  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG, cl_uint) \
1135  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT, cl_uint) \
1136  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE, cl_uint) \
1137  F(cl_device_info, CL_DEVICE_MAX_CLOCK_FREQUENCY, cl_uint) \
1138  F(cl_device_info, CL_DEVICE_ADDRESS_BITS, cl_uint) \
1139  F(cl_device_info, CL_DEVICE_MAX_READ_IMAGE_ARGS, cl_uint) \
1140  F(cl_device_info, CL_DEVICE_MAX_WRITE_IMAGE_ARGS, cl_uint) \
1141  F(cl_device_info, CL_DEVICE_MAX_MEM_ALLOC_SIZE, cl_ulong) \
1142  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_WIDTH, size_type) \
1143  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_HEIGHT, size_type) \
1144  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_WIDTH, size_type) \
1145  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_HEIGHT, size_type) \
1146  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_DEPTH, size_type) \
1147  F(cl_device_info, CL_DEVICE_IMAGE_SUPPORT, cl_bool) \
1148  F(cl_device_info, CL_DEVICE_MAX_PARAMETER_SIZE, size_type) \
1149  F(cl_device_info, CL_DEVICE_MAX_SAMPLERS, cl_uint) \
1150  F(cl_device_info, CL_DEVICE_MEM_BASE_ADDR_ALIGN, cl_uint) \
1151  F(cl_device_info, CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE, cl_uint) \
1152  F(cl_device_info, CL_DEVICE_SINGLE_FP_CONFIG, cl_device_fp_config) \
1153  F(cl_device_info, CL_DEVICE_DOUBLE_FP_CONFIG, cl_device_fp_config) \
1154  F(cl_device_info, CL_DEVICE_HALF_FP_CONFIG, cl_device_fp_config) \
1155  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_TYPE, cl_device_mem_cache_type) \
1156  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE, cl_uint)\
1157  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_SIZE, cl_ulong) \
1158  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_SIZE, cl_ulong) \
1159  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE, cl_ulong) \
1160  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_ARGS, cl_uint) \
1161  F(cl_device_info, CL_DEVICE_LOCAL_MEM_TYPE, cl_device_local_mem_type) \
1162  F(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE, cl_ulong) \
1163  F(cl_device_info, CL_DEVICE_ERROR_CORRECTION_SUPPORT, cl_bool) \
1164  F(cl_device_info, CL_DEVICE_PROFILING_TIMER_RESOLUTION, size_type) \
1165  F(cl_device_info, CL_DEVICE_ENDIAN_LITTLE, cl_bool) \
1166  F(cl_device_info, CL_DEVICE_AVAILABLE, cl_bool) \
1167  F(cl_device_info, CL_DEVICE_COMPILER_AVAILABLE, cl_bool) \
1168  F(cl_device_info, CL_DEVICE_EXECUTION_CAPABILITIES, cl_device_exec_capabilities) \
1169  F(cl_device_info, CL_DEVICE_PLATFORM, cl_platform_id) \
1170  F(cl_device_info, CL_DEVICE_NAME, string) \
1171  F(cl_device_info, CL_DEVICE_VENDOR, string) \
1172  F(cl_device_info, CL_DRIVER_VERSION, string) \
1173  F(cl_device_info, CL_DEVICE_PROFILE, string) \
1174  F(cl_device_info, CL_DEVICE_VERSION, string) \
1175  F(cl_device_info, CL_DEVICE_EXTENSIONS, string) \
1176  \
1177  F(cl_context_info, CL_CONTEXT_REFERENCE_COUNT, cl_uint) \
1178  F(cl_context_info, CL_CONTEXT_DEVICES, cl::vector<Device>) \
1179  F(cl_context_info, CL_CONTEXT_PROPERTIES, cl::vector<cl_context_properties>) \
1180  \
1181  F(cl_event_info, CL_EVENT_COMMAND_QUEUE, cl::CommandQueue) \
1182  F(cl_event_info, CL_EVENT_COMMAND_TYPE, cl_command_type) \
1183  F(cl_event_info, CL_EVENT_REFERENCE_COUNT, cl_uint) \
1184  F(cl_event_info, CL_EVENT_COMMAND_EXECUTION_STATUS, cl_int) \
1185  \
1186  F(cl_profiling_info, CL_PROFILING_COMMAND_QUEUED, cl_ulong) \
1187  F(cl_profiling_info, CL_PROFILING_COMMAND_SUBMIT, cl_ulong) \
1188  F(cl_profiling_info, CL_PROFILING_COMMAND_START, cl_ulong) \
1189  F(cl_profiling_info, CL_PROFILING_COMMAND_END, cl_ulong) \
1190  \
1191  F(cl_mem_info, CL_MEM_TYPE, cl_mem_object_type) \
1192  F(cl_mem_info, CL_MEM_FLAGS, cl_mem_flags) \
1193  F(cl_mem_info, CL_MEM_SIZE, size_type) \
1194  F(cl_mem_info, CL_MEM_HOST_PTR, void*) \
1195  F(cl_mem_info, CL_MEM_MAP_COUNT, cl_uint) \
1196  F(cl_mem_info, CL_MEM_REFERENCE_COUNT, cl_uint) \
1197  F(cl_mem_info, CL_MEM_CONTEXT, cl::Context) \
1198  \
1199  F(cl_image_info, CL_IMAGE_FORMAT, cl_image_format) \
1200  F(cl_image_info, CL_IMAGE_ELEMENT_SIZE, size_type) \
1201  F(cl_image_info, CL_IMAGE_ROW_PITCH, size_type) \
1202  F(cl_image_info, CL_IMAGE_SLICE_PITCH, size_type) \
1203  F(cl_image_info, CL_IMAGE_WIDTH, size_type) \
1204  F(cl_image_info, CL_IMAGE_HEIGHT, size_type) \
1205  F(cl_image_info, CL_IMAGE_DEPTH, size_type) \
1206  \
1207  F(cl_sampler_info, CL_SAMPLER_REFERENCE_COUNT, cl_uint) \
1208  F(cl_sampler_info, CL_SAMPLER_CONTEXT, cl::Context) \
1209  F(cl_sampler_info, CL_SAMPLER_NORMALIZED_COORDS, cl_bool) \
1210  F(cl_sampler_info, CL_SAMPLER_ADDRESSING_MODE, cl_addressing_mode) \
1211  F(cl_sampler_info, CL_SAMPLER_FILTER_MODE, cl_filter_mode) \
1212  \
1213  F(cl_program_info, CL_PROGRAM_REFERENCE_COUNT, cl_uint) \
1214  F(cl_program_info, CL_PROGRAM_CONTEXT, cl::Context) \
1215  F(cl_program_info, CL_PROGRAM_NUM_DEVICES, cl_uint) \
1216  F(cl_program_info, CL_PROGRAM_DEVICES, cl::vector<Device>) \
1217  F(cl_program_info, CL_PROGRAM_SOURCE, string) \
1218  F(cl_program_info, CL_PROGRAM_BINARY_SIZES, cl::vector<size_type>) \
1219  F(cl_program_info, CL_PROGRAM_BINARIES, cl::vector<cl::vector<unsigned char>>) \
1220  \
1221  F(cl_program_build_info, CL_PROGRAM_BUILD_STATUS, cl_build_status) \
1222  F(cl_program_build_info, CL_PROGRAM_BUILD_OPTIONS, string) \
1223  F(cl_program_build_info, CL_PROGRAM_BUILD_LOG, string) \
1224  \
1225  F(cl_kernel_info, CL_KERNEL_FUNCTION_NAME, string) \
1226  F(cl_kernel_info, CL_KERNEL_NUM_ARGS, cl_uint) \
1227  F(cl_kernel_info, CL_KERNEL_REFERENCE_COUNT, cl_uint) \
1228  F(cl_kernel_info, CL_KERNEL_CONTEXT, cl::Context) \
1229  F(cl_kernel_info, CL_KERNEL_PROGRAM, cl::Program) \
1230  \
1231  F(cl_kernel_work_group_info, CL_KERNEL_WORK_GROUP_SIZE, size_type) \
1232  F(cl_kernel_work_group_info, CL_KERNEL_COMPILE_WORK_GROUP_SIZE, cl::detail::size_t_array) \
1233  F(cl_kernel_work_group_info, CL_KERNEL_LOCAL_MEM_SIZE, cl_ulong) \
1234  \
1235  F(cl_command_queue_info, CL_QUEUE_CONTEXT, cl::Context) \
1236  F(cl_command_queue_info, CL_QUEUE_DEVICE, cl::Device) \
1237  F(cl_command_queue_info, CL_QUEUE_REFERENCE_COUNT, cl_uint) \
1238  F(cl_command_queue_info, CL_QUEUE_PROPERTIES, cl_command_queue_properties)
1239 
1240 
1241 #define CL_HPP_PARAM_NAME_INFO_1_1_(F) \
1242  F(cl_context_info, CL_CONTEXT_NUM_DEVICES, cl_uint)\
1243  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF, cl_uint) \
1244  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR, cl_uint) \
1245  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT, cl_uint) \
1246  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_INT, cl_uint) \
1247  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG, cl_uint) \
1248  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT, cl_uint) \
1249  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE, cl_uint) \
1250  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF, cl_uint) \
1251  F(cl_device_info, CL_DEVICE_OPENCL_C_VERSION, string) \
1252  \
1253  F(cl_mem_info, CL_MEM_ASSOCIATED_MEMOBJECT, cl::Memory) \
1254  F(cl_mem_info, CL_MEM_OFFSET, size_type) \
1255  \
1256  F(cl_kernel_work_group_info, CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE, size_type) \
1257  F(cl_kernel_work_group_info, CL_KERNEL_PRIVATE_MEM_SIZE, cl_ulong) \
1258  \
1259  F(cl_event_info, CL_EVENT_CONTEXT, cl::Context)
1260 
1261 #define CL_HPP_PARAM_NAME_INFO_1_2_(F) \
1262  F(cl_program_info, CL_PROGRAM_NUM_KERNELS, size_type) \
1263  F(cl_program_info, CL_PROGRAM_KERNEL_NAMES, string) \
1264  \
1265  F(cl_program_build_info, CL_PROGRAM_BINARY_TYPE, cl_program_binary_type) \
1266  \
1267  F(cl_kernel_info, CL_KERNEL_ATTRIBUTES, string) \
1268  \
1269  F(cl_kernel_arg_info, CL_KERNEL_ARG_ADDRESS_QUALIFIER, cl_kernel_arg_address_qualifier) \
1270  F(cl_kernel_arg_info, CL_KERNEL_ARG_ACCESS_QUALIFIER, cl_kernel_arg_access_qualifier) \
1271  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_NAME, string) \
1272  F(cl_kernel_arg_info, CL_KERNEL_ARG_NAME, string) \
1273  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_QUALIFIER, cl_kernel_arg_type_qualifier) \
1274  \
1275  F(cl_device_info, CL_DEVICE_PARENT_DEVICE, cl::Device) \
1276  F(cl_device_info, CL_DEVICE_PARTITION_PROPERTIES, cl::vector<cl_device_partition_property>) \
1277  F(cl_device_info, CL_DEVICE_PARTITION_TYPE, cl::vector<cl_device_partition_property>) \
1278  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT, cl_uint) \
1279  F(cl_device_info, CL_DEVICE_PREFERRED_INTEROP_USER_SYNC, size_type) \
1280  F(cl_device_info, CL_DEVICE_PARTITION_AFFINITY_DOMAIN, cl_device_affinity_domain) \
1281  F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS, string) \
1282  \
1283  F(cl_image_info, CL_IMAGE_ARRAY_SIZE, size_type) \
1284  F(cl_image_info, CL_IMAGE_NUM_MIP_LEVELS, cl_uint) \
1285  F(cl_image_info, CL_IMAGE_NUM_SAMPLES, cl_uint)
1286 
1287 #define CL_HPP_PARAM_NAME_INFO_2_0_(F) \
1288  F(cl_device_info, CL_DEVICE_QUEUE_ON_HOST_PROPERTIES, cl_command_queue_properties) \
1289  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES, cl_command_queue_properties) \
1290  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE, cl_uint) \
1291  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE, cl_uint) \
1292  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_QUEUES, cl_uint) \
1293  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_EVENTS, cl_uint) \
1294  F(cl_device_info, CL_DEVICE_MAX_PIPE_ARGS, cl_uint) \
1295  F(cl_device_info, CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS, cl_uint) \
1296  F(cl_device_info, CL_DEVICE_PIPE_MAX_PACKET_SIZE, cl_uint) \
1297  F(cl_device_info, CL_DEVICE_SVM_CAPABILITIES, cl_device_svm_capabilities) \
1298  F(cl_device_info, CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT, cl_uint) \
1299  F(cl_device_info, CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT, cl_uint) \
1300  F(cl_device_info, CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT, cl_uint) \
1301  F(cl_command_queue_info, CL_QUEUE_SIZE, cl_uint) \
1302  F(cl_mem_info, CL_MEM_USES_SVM_POINTER, cl_bool) \
1303  F(cl_program_build_info, CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE, size_type) \
1304  F(cl_pipe_info, CL_PIPE_PACKET_SIZE, cl_uint) \
1305  F(cl_pipe_info, CL_PIPE_MAX_PACKETS, cl_uint)
1306 
1307 #define CL_HPP_PARAM_NAME_DEVICE_FISSION_(F) \
1308  F(cl_device_info, CL_DEVICE_PARENT_DEVICE_EXT, cl_device_id) \
1309  F(cl_device_info, CL_DEVICE_PARTITION_TYPES_EXT, cl::vector<cl_device_partition_property_ext>) \
1310  F(cl_device_info, CL_DEVICE_AFFINITY_DOMAINS_EXT, cl::vector<cl_device_partition_property_ext>) \
1311  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT_EXT , cl_uint) \
1312  F(cl_device_info, CL_DEVICE_PARTITION_STYLE_EXT, cl::vector<cl_device_partition_property_ext>)
1313 
1314 template <typename enum_type, cl_int Name>
1315 struct param_traits {};
1316 
1317 #define CL_HPP_DECLARE_PARAM_TRAITS_(token, param_name, T) \
1318 struct token; \
1319 template<> \
1320 struct param_traits<detail:: token,param_name> \
1321 { \
1322  enum { value = param_name }; \
1323  typedef T param_type; \
1324 };
1325 
1326 CL_HPP_PARAM_NAME_INFO_1_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1327 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
1328 CL_HPP_PARAM_NAME_INFO_1_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1329 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
1330 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1331 CL_HPP_PARAM_NAME_INFO_1_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
1332 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1333 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
1334 CL_HPP_PARAM_NAME_INFO_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1335 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
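/* Editorial note: each entry of the X-macro tables above expands through
 * CL_HPP_DECLARE_PARAM_TRAITS_ into a param_traits specialization tying a
 * query token to its result type. The CL_DEVICE_NAME row, for instance,
 * produces in effect:
 *
 *     template<>
 *     struct param_traits<detail::cl_device_info, CL_DEVICE_NAME>
 *     { enum { value = CL_DEVICE_NAME }; typedef string param_type; };
 *
 * which is what lets the typed getInfo<CL_DEVICE_NAME>() overloads return a
 * cl::string. */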
1336 
1337 
1338 // Flags deprecated in OpenCL 2.0
1339 #define CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(F) \
1340  F(cl_device_info, CL_DEVICE_QUEUE_PROPERTIES, cl_command_queue_properties)
1341 
1342 #define CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(F) \
1343  F(cl_device_info, CL_DEVICE_HOST_UNIFIED_MEMORY, cl_bool)
1344 
1345 #define CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(F) \
1346  F(cl_image_info, CL_IMAGE_BUFFER, cl::Buffer)
1347 
1348 // Include deprecated query flags based on versions
1349 // Only include deprecated 1.0 flags if 2.0 not active as there is an enum clash
1350 #if CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1351 CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1352 #endif // CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1353 #if CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1354 CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1355 #endif // CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1356 #if CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1357 CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1358 #endif // CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1359 
1360 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
1361 CL_HPP_PARAM_NAME_DEVICE_FISSION_(CL_HPP_DECLARE_PARAM_TRAITS_);
1362 #endif // CL_HPP_USE_CL_DEVICE_FISSION
1363 
1364 #ifdef CL_PLATFORM_ICD_SUFFIX_KHR
1365 CL_HPP_DECLARE_PARAM_TRAITS_(cl_platform_info, CL_PLATFORM_ICD_SUFFIX_KHR, string)
1366 #endif
1367 
1368 #ifdef CL_DEVICE_PROFILING_TIMER_OFFSET_AMD
1369 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_PROFILING_TIMER_OFFSET_AMD, cl_ulong)
1370 #endif
1371 
1372 #ifdef CL_DEVICE_GLOBAL_FREE_MEMORY_AMD
1373 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_FREE_MEMORY_AMD, vector<size_type>)
1374 #endif
1375 #ifdef CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD
1376 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD, cl_uint)
1377 #endif
1378 #ifdef CL_DEVICE_SIMD_WIDTH_AMD
1379 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_WIDTH_AMD, cl_uint)
1380 #endif
1381 #ifdef CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD
1382 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD, cl_uint)
1383 #endif
1384 #ifdef CL_DEVICE_WAVEFRONT_WIDTH_AMD
1385 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WAVEFRONT_WIDTH_AMD, cl_uint)
1386 #endif
1387 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD
1388 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD, cl_uint)
1389 #endif
1390 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD
1391 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD, cl_uint)
1392 #endif
1393 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD
1394 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD, cl_uint)
1395 #endif
1396 #ifdef CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD
1397 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD, cl_uint)
1398 #endif
1399 #ifdef CL_DEVICE_LOCAL_MEM_BANKS_AMD
1400 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_BANKS_AMD, cl_uint)
1401 #endif
1402 
1403 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV
1404 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV, cl_uint)
1405 #endif
1406 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV
1407 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV, cl_uint)
1408 #endif
1409 #ifdef CL_DEVICE_REGISTERS_PER_BLOCK_NV
1410 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_REGISTERS_PER_BLOCK_NV, cl_uint)
1411 #endif
1412 #ifdef CL_DEVICE_WARP_SIZE_NV
1413 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WARP_SIZE_NV, cl_uint)
1414 #endif
1415 #ifdef CL_DEVICE_GPU_OVERLAP_NV
1416 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GPU_OVERLAP_NV, cl_bool)
1417 #endif
1418 #ifdef CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV
1419 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV, cl_bool)
1420 #endif
1421 #ifdef CL_DEVICE_INTEGRATED_MEMORY_NV
1422 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_INTEGRATED_MEMORY_NV, cl_bool)
1423 #endif
1424 
1425 // Convenience functions
1426 
1427 template <typename Func, typename T>
1428 inline cl_int
1429 getInfo(Func f, cl_uint name, T* param)
1430 {
1431  return getInfoHelper(f, name, param, 0);
1432 }
1433 
1434 template <typename Func, typename Arg0>
1435 struct GetInfoFunctor0
1436 {
1437  Func f_; const Arg0& arg0_;
1438  cl_int operator ()(
1439  cl_uint param, size_type size, void* value, size_type* size_ret)
1440  { return f_(arg0_, param, size, value, size_ret); }
1441 };
1442 
1443 template <typename Func, typename Arg0, typename Arg1>
1444 struct GetInfoFunctor1
1445 {
1446  Func f_; const Arg0& arg0_; const Arg1& arg1_;
1447  cl_int operator ()(
1448  cl_uint param, size_type size, void* value, size_type* size_ret)
1449  { return f_(arg0_, arg1_, param, size, value, size_ret); }
1450 };
1451 
1452 template <typename Func, typename Arg0, typename T>
1453 inline cl_int
1454 getInfo(Func f, const Arg0& arg0, cl_uint name, T* param)
1455 {
1456  GetInfoFunctor0<Func, Arg0> f0 = { f, arg0 };
1457  return getInfoHelper(f0, name, param, 0);
1458 }
1459 
1460 template <typename Func, typename Arg0, typename Arg1, typename T>
1461 inline cl_int
1462 getInfo(Func f, const Arg0& arg0, const Arg1& arg1, cl_uint name, T* param)
1463 {
1464  GetInfoFunctor1<Func, Arg0, Arg1> f0 = { f, arg0, arg1 };
1465  return getInfoHelper(f0, name, param, 0);
1466 }
1467 
1468 
1469 template<typename T>
1470 struct ReferenceHandler
1471 { };
1472 
1473 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1474 
1477 template <>
1478 struct ReferenceHandler<cl_device_id>
1479 {
1489  static cl_int retain(cl_device_id device)
1490  { return ::clRetainDevice(device); }
1500  static cl_int release(cl_device_id device)
1501  { return ::clReleaseDevice(device); }
1502 };
1503 #else // CL_HPP_TARGET_OPENCL_VERSION >= 120
1504 
1507 template <>
1508 struct ReferenceHandler<cl_device_id>
1509 {
1510  // cl_device_id does not have retain().
1511  static cl_int retain(cl_device_id)
1512  { return CL_SUCCESS; }
1513  // cl_device_id does not have release().
1514  static cl_int release(cl_device_id)
1515  { return CL_SUCCESS; }
1516 };
1517 #endif // ! (CL_HPP_TARGET_OPENCL_VERSION >= 120)
1518 
1519 template <>
1520 struct ReferenceHandler<cl_platform_id>
1521 {
1522  // cl_platform_id does not have retain().
1523  static cl_int retain(cl_platform_id)
1524  { return CL_SUCCESS; }
1525  // cl_platform_id does not have release().
1526  static cl_int release(cl_platform_id)
1527  { return CL_SUCCESS; }
1528 };
1529 
1530 template <>
1531 struct ReferenceHandler<cl_context>
1532 {
1533  static cl_int retain(cl_context context)
1534  { return ::clRetainContext(context); }
1535  static cl_int release(cl_context context)
1536  { return ::clReleaseContext(context); }
1537 };
1538 
1539 template <>
1540 struct ReferenceHandler<cl_command_queue>
1541 {
1542  static cl_int retain(cl_command_queue queue)
1543  { return ::clRetainCommandQueue(queue); }
1544  static cl_int release(cl_command_queue queue)
1545  { return ::clReleaseCommandQueue(queue); }
1546 };
1547 
1548 template <>
1549 struct ReferenceHandler<cl_mem>
1550 {
1551  static cl_int retain(cl_mem memory)
1552  { return ::clRetainMemObject(memory); }
1553  static cl_int release(cl_mem memory)
1554  { return ::clReleaseMemObject(memory); }
1555 };
1556 
1557 template <>
1558 struct ReferenceHandler<cl_sampler>
1559 {
1560  static cl_int retain(cl_sampler sampler)
1561  { return ::clRetainSampler(sampler); }
1562  static cl_int release(cl_sampler sampler)
1563  { return ::clReleaseSampler(sampler); }
1564 };
1565 
1566 template <>
1567 struct ReferenceHandler<cl_program>
1568 {
1569  static cl_int retain(cl_program program)
1570  { return ::clRetainProgram(program); }
1571  static cl_int release(cl_program program)
1572  { return ::clReleaseProgram(program); }
1573 };
1574 
1575 template <>
1576 struct ReferenceHandler<cl_kernel>
1577 {
1578  static cl_int retain(cl_kernel kernel)
1579  { return ::clRetainKernel(kernel); }
1580  static cl_int release(cl_kernel kernel)
1581  { return ::clReleaseKernel(kernel); }
1582 };
1583 
1584 template <>
1585 struct ReferenceHandler<cl_event>
1586 {
1587  static cl_int retain(cl_event event)
1588  { return ::clRetainEvent(event); }
1589  static cl_int release(cl_event event)
1590  { return ::clReleaseEvent(event); }
1591 };
1592 
1593 
1594 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1595 // Extracts version number with major in the upper 16 bits, minor in the lower 16
1596 static cl_uint getVersion(const vector<char> &versionInfo)
1597 {
1598  int highVersion = 0;
1599  int lowVersion = 0;
1600  int index = 7;
1601  while(versionInfo[index] != '.' ) {
1602  highVersion *= 10;
1603  highVersion += versionInfo[index]-'0';
1604  ++index;
1605  }
1606  ++index;
1607  while(versionInfo[index] != ' ' && versionInfo[index] != '\0') {
1608  lowVersion *= 10;
1609  lowVersion += versionInfo[index]-'0';
1610  ++index;
1611  }
1612  return (highVersion << 16) | lowVersion;
1613 }
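/* Editorial note: the parser above relies on the mandated
 * "OpenCL <major>.<minor> <vendor-specific>" form of CL_PLATFORM_VERSION, so
 * for a hypothetical string "OpenCL 1.2 FooSDK" it starts at index 7 (just
 * past "OpenCL ") and returns (1 << 16) | 2. */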
1614 
1615 static cl_uint getPlatformVersion(cl_platform_id platform)
1616 {
1617  size_type size = 0;
1618  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, 0, NULL, &size);
1619 
1620  vector<char> versionInfo(size);
1621  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, size, versionInfo.data(), &size);
1622  return getVersion(versionInfo);
1623 }
1624 
1625 static cl_uint getDevicePlatformVersion(cl_device_id device)
1626 {
1627  cl_platform_id platform;
1628  clGetDeviceInfo(device, CL_DEVICE_PLATFORM, sizeof(platform), &platform, NULL);
1629  return getPlatformVersion(platform);
1630 }
1631 
1632 static cl_uint getContextPlatformVersion(cl_context context)
1633 {
1634  // The platform cannot be queried directly, so we first have to grab a
1635  // device from the context and obtain its platform
1636  size_type size = 0;
1637  clGetContextInfo(context, CL_CONTEXT_DEVICES, 0, NULL, &size);
1638  if (size == 0)
1639  return 0;
1640  vector<cl_device_id> devices(size/sizeof(cl_device_id));
1641  clGetContextInfo(context, CL_CONTEXT_DEVICES, size, devices.data(), NULL);
1642  return getDevicePlatformVersion(devices[0]);
1643 }
1644 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1645 
1646 template <typename T>
1647 class Wrapper
1648 {
1649 public:
1650  typedef T cl_type;
1651 
1652 protected:
1653  cl_type object_;
1654 
1655 public:
1656  Wrapper() : object_(NULL) { }
1657 
1658  Wrapper(const cl_type &obj, bool retainObject) : object_(obj)
1659  {
1660  if (retainObject) {
1661  detail::errHandler(retain(), __RETAIN_ERR);
1662  }
1663  }
1664 
1665  ~Wrapper()
1666  {
1667  if (object_ != NULL) { release(); }
1668  }
1669 
1670  Wrapper(const Wrapper<cl_type>& rhs)
1671  {
1672  object_ = rhs.object_;
1673  detail::errHandler(retain(), __RETAIN_ERR);
1674  }
1675 
1676  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1677  {
1678  object_ = rhs.object_;
1679  rhs.object_ = NULL;
1680  }
1681 
1682  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1683  {
1684  if (this != &rhs) {
1685  detail::errHandler(release(), __RELEASE_ERR);
1686  object_ = rhs.object_;
1687  detail::errHandler(retain(), __RETAIN_ERR);
1688  }
1689  return *this;
1690  }
1691 
1692  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1693  {
1694  if (this != &rhs) {
1695  detail::errHandler(release(), __RELEASE_ERR);
1696  object_ = rhs.object_;
1697  rhs.object_ = NULL;
1698  }
1699  return *this;
1700  }
1701 
1702  Wrapper<cl_type>& operator = (const cl_type &rhs)
1703  {
1704  detail::errHandler(release(), __RELEASE_ERR);
1705  object_ = rhs;
1706  return *this;
1707  }
1708 
1709  const cl_type& operator ()() const { return object_; }
1710 
1711  cl_type& operator ()() { return object_; }
1712 
1713  const cl_type get() const { return object_; }
1714 
1715  cl_type get() { return object_; }
1716 
1717 
1718 protected:
1719  template<typename Func, typename U>
1720  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1721 
1722  cl_int retain() const
1723  {
1724  if (object_ != nullptr) {
1725  return ReferenceHandler<cl_type>::retain(object_);
1726  }
1727  else {
1728  return CL_SUCCESS;
1729  }
1730  }
1731 
1732  cl_int release() const
1733  {
1734  if (object_ != nullptr) {
1735  return ReferenceHandler<cl_type>::release(object_);
1736  }
1737  else {
1738  return CL_SUCCESS;
1739  }
1740  }
1741 };
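/* Editorial note: Wrapper<T> is what gives the public classes (Context,
 * CommandQueue, Program, ...) value semantics over the OpenCL reference
 * count: copies retain, destruction and reassignment release, and moves
 * transfer ownership without touching the count. A sketch of the net effect:
 *
 *     cl::Context a(CL_DEVICE_TYPE_DEFAULT);  // context refcount 1
 *     cl::Context b = a;                      // clRetainContext  -> 2
 *     a = cl::Context();                      // clReleaseContext -> 1
 */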
1742 
1743 template <>
1744 class Wrapper<cl_device_id>
1745 {
1746 public:
1747  typedef cl_device_id cl_type;
1748 
1749 protected:
1750  cl_type object_;
1751  bool referenceCountable_;
1752 
1753  static bool isReferenceCountable(cl_device_id device)
1754  {
1755  bool retVal = false;
1756 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1757 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
1758  if (device != NULL) {
1759  int version = getDevicePlatformVersion(device);
1760  if(version > ((1 << 16) + 1)) {
1761  retVal = true;
1762  }
1763  }
1764 #else // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1765  retVal = true;
1766 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1767 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1768  return retVal;
1769  }
1770 
1771 public:
1772  Wrapper() : object_(NULL), referenceCountable_(false)
1773  {
1774  }
1775 
1776  Wrapper(const cl_type &obj, bool retainObject) :
1777  object_(obj),
1778  referenceCountable_(false)
1779  {
1780  referenceCountable_ = isReferenceCountable(obj);
1781 
1782  if (retainObject) {
1783  detail::errHandler(retain(), __RETAIN_ERR);
1784  }
1785  }
1786 
1787  ~Wrapper()
1788  {
1789  release();
1790  }
1791 
1792  Wrapper(const Wrapper<cl_type>& rhs)
1793  {
1794  object_ = rhs.object_;
1795  referenceCountable_ = isReferenceCountable(object_);
1796  detail::errHandler(retain(), __RETAIN_ERR);
1797  }
1798 
1799  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1800  {
1801  object_ = rhs.object_;
1802  referenceCountable_ = rhs.referenceCountable_;
1803  rhs.object_ = NULL;
1804  rhs.referenceCountable_ = false;
1805  }
1806 
1807  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1808  {
1809  if (this != &rhs) {
1810  detail::errHandler(release(), __RELEASE_ERR);
1811  object_ = rhs.object_;
1812  referenceCountable_ = rhs.referenceCountable_;
1813  detail::errHandler(retain(), __RETAIN_ERR);
1814  }
1815  return *this;
1816  }
1817 
1818  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1819  {
1820  if (this != &rhs) {
1821  detail::errHandler(release(), __RELEASE_ERR);
1822  object_ = rhs.object_;
1823  referenceCountable_ = rhs.referenceCountable_;
1824  rhs.object_ = NULL;
1825  rhs.referenceCountable_ = false;
1826  }
1827  return *this;
1828  }
1829 
1830  Wrapper<cl_type>& operator = (const cl_type &rhs)
1831  {
1832  detail::errHandler(release(), __RELEASE_ERR);
1833  object_ = rhs;
1834  referenceCountable_ = isReferenceCountable(object_);
1835  return *this;
1836  }
1837 
1838  const cl_type& operator ()() const { return object_; }
1839 
1840  cl_type& operator ()() { return object_; }
1841 
1842  cl_type get() const { return object_; }
1843 
1844 protected:
1845  template<typename Func, typename U>
1846  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1847 
1848  template<typename Func, typename U>
1849  friend inline cl_int getInfoHelper(Func, cl_uint, vector<U>*, int, typename U::cl_type);
1850 
1851  cl_int retain() const
1852  {
1853  if( object_ != nullptr && referenceCountable_ ) {
1854  return ReferenceHandler<cl_type>::retain(object_);
1855  }
1856  else {
1857  return CL_SUCCESS;
1858  }
1859  }
1860 
1861  cl_int release() const
1862  {
1863  if (object_ != nullptr && referenceCountable_) {
1864  return ReferenceHandler<cl_type>::release(object_);
1865  }
1866  else {
1867  return CL_SUCCESS;
1868  }
1869  }
1870 };
1871 
1872 template <typename T>
1873 inline bool operator==(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1874 {
1875  return lhs() == rhs();
1876 }
1877 
1878 template <typename T>
1879 inline bool operator!=(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1880 {
1881  return !operator==(lhs, rhs);
1882 }
1883 
1884 } // namespace detail
1886 
1887 
1888 using BuildLogType = vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, CL_PROGRAM_BUILD_LOG>::param_type>>;
1889 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1890 
1893 class BuildError : public Error
1894 {
1895 private:
1896  BuildLogType buildLogs;
1897 public:
1898  BuildError(cl_int err, const char * errStr, const BuildLogType &vec) : Error(err, errStr), buildLogs(vec)
1899  {
1900  }
1901 
1902  BuildLogType getBuildLog() const
1903  {
1904  return buildLogs;
1905  }
1906 };
1907 namespace detail {
1908  static inline cl_int buildErrHandler(
1909  cl_int err,
1910  const char * errStr,
1911  const BuildLogType &buildLogs)
1912  {
1913  if (err != CL_SUCCESS) {
1914  throw BuildError(err, errStr, buildLogs);
1915  }
1916  return err;
1917  }
1918 } // namespace detail
1919 
1920 #else
1921 namespace detail {
1922  static inline cl_int buildErrHandler(
1923  cl_int err,
1924  const char * errStr,
1925  const BuildLogType &buildLogs)
1926  {
1927  (void)buildLogs; // suppress unused variable warning
1928  (void)errStr;
1929  return err;
1930  }
1931 } // namespace detail
1932 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1933 
1934 
1940 struct ImageFormat : public cl_image_format
1941 {
1944 
1946  ImageFormat(cl_channel_order order, cl_channel_type type)
1947  {
1948  image_channel_order = order;
1949  image_channel_data_type = type;
1950  }
1951 
1953  ImageFormat& operator = (const ImageFormat& rhs)
1954  {
1955  if (this != &rhs) {
1956  this->image_channel_data_type = rhs.image_channel_data_type;
1957  this->image_channel_order = rhs.image_channel_order;
1958  }
1959  return *this;
1960  }
1961 };
1962 
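A short illustrative use of ImageFormat (not part of the header): the two fields name the channel order and the per-channel data type, and the struct is passed by value to the image constructors further below.

    // Illustration only: RGBA, 8 bits per channel, normalized to [0,1].
    cl::ImageFormat format(CL_RGBA, CL_UNORM_INT8);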
1970 class Device : public detail::Wrapper<cl_device_id>
1971 {
1972 private:
1973  static std::once_flag default_initialized_;
1974  static Device default_;
1975  static cl_int default_error_;
1976 
1982  static void makeDefault();
1983 
1989  static void makeDefaultProvided(const Device &p) {
1990  default_ = p;
1991  }
1992 
1993 public:
1994 #ifdef CL_HPP_UNIT_TEST_ENABLE
1995 
2001  static void unitTestClearDefault() {
2002  default_ = Device();
2003  }
2004 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2005 
2007  Device() : detail::Wrapper<cl_type>() { }
2008 
2013  explicit Device(const cl_device_id &device, bool retainObject = false) :
2014  detail::Wrapper<cl_type>(device, retainObject) { }
2015 
2020  static Device getDefault(
2021  cl_int *errResult = NULL)
2022  {
2023  std::call_once(default_initialized_, makeDefault);
2024  detail::errHandler(default_error_);
2025  if (errResult != NULL) {
2026  *errResult = default_error_;
2027  }
2028  return default_;
2029  }
2030 
2038  static Device setDefault(const Device &default_device)
2039  {
2040  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_device));
2041  detail::errHandler(default_error_);
2042  return default_;
2043  }
2044 
2049  Device& operator = (const cl_device_id& rhs)
2050  {
2051  detail::Wrapper<cl_type>::operator=(rhs);
2052  return *this;
2053  }
2054 
2058  Device(const Device& dev) : detail::Wrapper<cl_type>(dev) {}
2059 
2063  Device& operator = (const Device &dev)
2064  {
2065  detail::Wrapper<cl_type>::operator=(dev);
2066  return *this;
2067  }
2068 
2072  Device(Device&& dev) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(dev)) {}
2073 
2077  Device& operator = (Device &&dev)
2078  {
2079  detail::Wrapper<cl_type>::operator=(std::move(dev));
2080  return *this;
2081  }
2082 
2084  template <typename T>
2085  cl_int getInfo(cl_device_info name, T* param) const
2086  {
2087  return detail::errHandler(
2088  detail::getInfo(&::clGetDeviceInfo, object_, name, param),
2089  __GET_DEVICE_INFO_ERR);
2090  }
2091 
2093  template <cl_int name> typename
2094  detail::param_traits<detail::cl_device_info, name>::param_type
2095  getInfo(cl_int* err = NULL) const
2096  {
2097  typename detail::param_traits<
2098  detail::cl_device_info, name>::param_type param;
2099  cl_int result = getInfo(name, &param);
2100  if (err != NULL) {
2101  *err = result;
2102  }
2103  return param;
2104  }
2105 
2109 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2110  cl_int createSubDevices(
2112  const cl_device_partition_property * properties,
2113  vector<Device>* devices)
2114  {
2115  cl_uint n = 0;
2116  cl_int err = clCreateSubDevices(object_, properties, 0, NULL, &n);
2117  if (err != CL_SUCCESS) {
2118  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2119  }
2120 
2121  vector<cl_device_id> ids(n);
2122  err = clCreateSubDevices(object_, properties, n, ids.data(), NULL);
2123  if (err != CL_SUCCESS) {
2124  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2125  }
2126 
2127  // Cannot trivially assign because we need to capture intermediates
2128  // with safe construction
2129  if (devices) {
2130  devices->resize(ids.size());
2131 
2132  // Assign to param, constructing with retain behaviour
2133  // to correctly capture each underlying CL object
2134  for (size_type i = 0; i < ids.size(); i++) {
2135  // We do not need to retain because this device is being created
2136  // by the runtime
2137  (*devices)[i] = Device(ids[i], false);
2138  }
2139  }
2140 
2141  return CL_SUCCESS;
2142  }
2143 #elif defined(CL_HPP_USE_CL_DEVICE_FISSION)
2144 
2148  cl_int createSubDevices(
2149  const cl_device_partition_property_ext * properties,
2150  vector<Device>* devices)
2151  {
2152  typedef CL_API_ENTRY cl_int
2153  ( CL_API_CALL * PFN_clCreateSubDevicesEXT)(
2154  cl_device_id /*in_device*/,
2155  const cl_device_partition_property_ext * /* properties */,
2156  cl_uint /*num_entries*/,
2157  cl_device_id * /*out_devices*/,
2158  cl_uint * /*num_devices*/ ) CL_EXT_SUFFIX__VERSION_1_1;
2159 
2160  static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
2161  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateSubDevicesEXT);
2162 
2163  cl_uint n = 0;
2164  cl_int err = pfn_clCreateSubDevicesEXT(object_, properties, 0, NULL, &n);
2165  if (err != CL_SUCCESS) {
2166  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2167  }
2168 
2169  vector<cl_device_id> ids(n);
2170  err = pfn_clCreateSubDevicesEXT(object_, properties, n, ids.data(), NULL);
2171  if (err != CL_SUCCESS) {
2172  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2173  }
2174  // Cannot trivially assign because we need to capture intermediates
2175  // with safe construction
2176  if (devices) {
2177  devices->resize(ids.size());
2178 
2179  // Assign to param, constructing with retain behaviour
2180  // to correctly capture each underlying CL object
2181  for (size_type i = 0; i < ids.size(); i++) {
2182  // We do not need to retain because this device is being created
2183  // by the runtime
2184  (*devices)[i] = Device(ids[i], false);
2185  }
2186  }
2187  return CL_SUCCESS;
2188  }
2189 #endif // defined(CL_HPP_USE_CL_DEVICE_FISSION)
2190 };
2191 
2192 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Device::default_initialized_;
2193 CL_HPP_DEFINE_STATIC_MEMBER_ Device Device::default_;
2194 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Device::default_error_ = CL_SUCCESS;
2195 
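An illustrative sketch of typical Device usage (not part of the header; assumes the translation unit includes <CL/cl2.hpp> and at least one OpenCL device is installed):

    cl_int err = CL_SUCCESS;
    cl::Device device = cl::Device::getDefault(&err);
    if (err == CL_SUCCESS) {
        // Typed queries via the templated getInfo.
        cl::string name      = device.getInfo<CL_DEVICE_NAME>();
        cl_ulong   globalMem = device.getInfo<CL_DEVICE_GLOBAL_MEM_SIZE>();
    }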
2203 class Platform : public detail::Wrapper<cl_platform_id>
2204 {
2205 private:
2206  static std::once_flag default_initialized_;
2207  static Platform default_;
2208  static cl_int default_error_;
2209 
2215  static void makeDefault() {
2216  /* Throwing an exception from a call_once invocation does not do
2217  * what we wish, so we catch it and save the error.
2218  */
2219 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2220  try
2221 #endif
2222  {
2223  // If a default wasn't passed, generate one
2224  // Otherwise set it
2225  cl_uint n = 0;
2226 
2227  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2228  if (err != CL_SUCCESS) {
2229  default_error_ = err;
2230  return;
2231  }
2232  if (n == 0) {
2233  default_error_ = CL_INVALID_PLATFORM;
2234  return;
2235  }
2236 
2237  vector<cl_platform_id> ids(n);
2238  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2239  if (err != CL_SUCCESS) {
2240  default_error_ = err;
2241  return;
2242  }
2243 
2244  default_ = Platform(ids[0]);
2245  }
2246 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2247  catch (cl::Error &e) {
2248  default_error_ = e.err();
2249  }
2250 #endif
2251  }
2252 
2258  static void makeDefaultProvided(const Platform &p) {
2259  default_ = p;
2260  }
2261 
2262 public:
2263 #ifdef CL_HPP_UNIT_TEST_ENABLE
2264 
2270  static void unitTestClearDefault() {
2271  default_ = Platform();
2272  }
2273 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2274 
2276  Platform() : detail::Wrapper<cl_type>() { }
2277 
2285  explicit Platform(const cl_platform_id &platform, bool retainObject = false) :
2286  detail::Wrapper<cl_type>(platform, retainObject) { }
2287 
2292  Platform& operator = (const cl_platform_id& rhs)
2293  {
2294  detail::Wrapper<cl_type>::operator=(rhs);
2295  return *this;
2296  }
2297 
2298  static Platform getDefault(
2299  cl_int *errResult = NULL)
2300  {
2301  std::call_once(default_initialized_, makeDefault);
2302  detail::errHandler(default_error_);
2303  if (errResult != NULL) {
2304  *errResult = default_error_;
2305  }
2306  return default_;
2307  }
2308 
2316  static Platform setDefault(const Platform &default_platform)
2317  {
2318  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_platform));
2319  detail::errHandler(default_error_);
2320  return default_;
2321  }
2322 
2324  cl_int getInfo(cl_platform_info name, string* param) const
2325  {
2326  return detail::errHandler(
2327  detail::getInfo(&::clGetPlatformInfo, object_, name, param),
2328  __GET_PLATFORM_INFO_ERR);
2329  }
2330 
2332  template <cl_int name> typename
2333  detail::param_traits<detail::cl_platform_info, name>::param_type
2334  getInfo(cl_int* err = NULL) const
2335  {
2336  typename detail::param_traits<
2337  detail::cl_platform_info, name>::param_type param;
2338  cl_int result = getInfo(name, &param);
2339  if (err != NULL) {
2340  *err = result;
2341  }
2342  return param;
2343  }
2344 
2349  cl_int getDevices(
2350  cl_device_type type,
2351  vector<Device>* devices) const
2352  {
2353  cl_uint n = 0;
2354  if( devices == NULL ) {
2355  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2356  }
2357  cl_int err = ::clGetDeviceIDs(object_, type, 0, NULL, &n);
2358  if (err != CL_SUCCESS) {
2359  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2360  }
2361 
2362  vector<cl_device_id> ids(n);
2363  err = ::clGetDeviceIDs(object_, type, n, ids.data(), NULL);
2364  if (err != CL_SUCCESS) {
2365  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2366  }
2367 
2368  // Cannot trivially assign because we need to capture intermediates
2369  // with safe construction
2370  // We must retain things we obtain from the API to avoid releasing
2371  // API-owned objects.
2372  if (devices) {
2373  devices->resize(ids.size());
2374 
2375  // Assign to param, constructing with retain behaviour
2376  // to correctly capture each underlying CL object
2377  for (size_type i = 0; i < ids.size(); i++) {
2378  (*devices)[i] = Device(ids[i], true);
2379  }
2380  }
2381  return CL_SUCCESS;
2382  }
2383 
2384 #if defined(CL_HPP_USE_DX_INTEROP)
2385 
2408  cl_int getDevices(
2409  cl_d3d10_device_source_khr d3d_device_source,
2410  void * d3d_object,
2411  cl_d3d10_device_set_khr d3d_device_set,
2412  vector<Device>* devices) const
2413  {
2414  typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clGetDeviceIDsFromD3D10KHR)(
2415  cl_platform_id platform,
2416  cl_d3d10_device_source_khr d3d_device_source,
2417  void * d3d_object,
2418  cl_d3d10_device_set_khr d3d_device_set,
2419  cl_uint num_entries,
2420  cl_device_id * devices,
2421  cl_uint* num_devices);
2422 
2423  if( devices == NULL ) {
2424  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2425  }
2426 
2427  static PFN_clGetDeviceIDsFromD3D10KHR pfn_clGetDeviceIDsFromD3D10KHR = NULL;
2428  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(object_, clGetDeviceIDsFromD3D10KHR);
2429 
2430  cl_uint n = 0;
2431  cl_int err = pfn_clGetDeviceIDsFromD3D10KHR(
2432  object_,
2433  d3d_device_source,
2434  d3d_object,
2435  d3d_device_set,
2436  0,
2437  NULL,
2438  &n);
2439  if (err != CL_SUCCESS) {
2440  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2441  }
2442 
2443  vector<cl_device_id> ids(n);
2444  err = pfn_clGetDeviceIDsFromD3D10KHR(
2445  object_,
2446  d3d_device_source,
2447  d3d_object,
2448  d3d_device_set,
2449  n,
2450  ids.data(),
2451  NULL);
2452  if (err != CL_SUCCESS) {
2453  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2454  }
2455 
2456  // Cannot trivially assign because we need to capture intermediates
2457  // with safe construction
2458  // We must retain things we obtain from the API to avoid releasing
2459  // API-owned objects.
2460  if (devices) {
2461  devices->resize(ids.size());
2462 
2463  // Assign to param, constructing with retain behaviour
2464  // to correctly capture each underlying CL object
2465  for (size_type i = 0; i < ids.size(); i++) {
2466  (*devices)[i] = Device(ids[i], true);
2467  }
2468  }
2469  return CL_SUCCESS;
2470  }
2471 #endif
2472 
2477  static cl_int get(
2478  vector<Platform>* platforms)
2479  {
2480  cl_uint n = 0;
2481 
2482  if( platforms == NULL ) {
2483  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_PLATFORM_IDS_ERR);
2484  }
2485 
2486  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2487  if (err != CL_SUCCESS) {
2488  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2489  }
2490 
2491  vector<cl_platform_id> ids(n);
2492  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2493  if (err != CL_SUCCESS) {
2494  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2495  }
2496 
2497  if (platforms) {
2498  platforms->resize(ids.size());
2499 
2500  // Platforms don't reference count
2501  for (size_type i = 0; i < ids.size(); i++) {
2502  (*platforms)[i] = Platform(ids[i]);
2503  }
2504  }
2505  return CL_SUCCESS;
2506  }
2507 
2512  static cl_int get(
2513  Platform * platform)
2514  {
2515  cl_int err;
2516  Platform default_platform = Platform::getDefault(&err);
2517  if (platform) {
2518  *platform = default_platform;
2519  }
2520  return err;
2521  }
2522 
2531  static Platform get(
2532  cl_int * errResult = NULL)
2533  {
2534  cl_int err;
2535  Platform default_platform = Platform::getDefault(&err);
2536  if (errResult) {
2537  *errResult = err;
2538  }
2539  return default_platform;
2540  }
2541 
2542 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2543  cl_int
2545  unloadCompiler()
2546  {
2547  return ::clUnloadPlatformCompiler(object_);
2548  }
2549 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
2550 }; // class Platform
2551 
2552 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Platform::default_initialized_;
2553 CL_HPP_DEFINE_STATIC_MEMBER_ Platform Platform::default_;
2554 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Platform::default_error_ = CL_SUCCESS;
2555 
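An illustrative sketch of Platform enumeration (not part of the header): pick the first platform that exposes a GPU device and make it the process-wide default. The GPU preference is an arbitrary choice for the example, and setDefault only takes effect if getDefault has not already been called.

    cl::vector<cl::Platform> platforms;
    if (cl::Platform::get(&platforms) == CL_SUCCESS) {
        for (const cl::Platform &p : platforms) {
            cl::vector<cl::Device> gpus;
            if (p.getDevices(CL_DEVICE_TYPE_GPU, &gpus) == CL_SUCCESS && !gpus.empty()) {
                cl::Platform::setDefault(p);
                break;
            }
        }
    }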
2556 
2560 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2561 
2565 inline CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int
2566 UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
2567 inline cl_int
2568  UnloadCompiler()
2569  {
2570  return ::clUnloadCompiler();
2571 }
2572 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2573 
2582 class Context
2583  : public detail::Wrapper<cl_context>
2584 {
2585 private:
2586  static std::once_flag default_initialized_;
2587  static Context default_;
2588  static cl_int default_error_;
2589 
2595  static void makeDefault() {
2596  /* Throwing an exception from a call_once invocation does not do
2597  * what we wish, so we catch it and save the error.
2598  */
2599 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2600  try
2601 #endif
2602  {
2603 #if !defined(__APPLE__) && !defined(__MACOS)
2604  const Platform &p = Platform::getDefault();
2605  cl_platform_id defaultPlatform = p();
2606  cl_context_properties properties[3] = {
2607  CL_CONTEXT_PLATFORM, (cl_context_properties)defaultPlatform, 0
2608  };
2609 #else // #if !defined(__APPLE__) && !defined(__MACOS)
2610  cl_context_properties *properties = nullptr;
2611 #endif // #if !defined(__APPLE__) && !defined(__MACOS)
2612 
2613  default_ = Context(
2614  CL_DEVICE_TYPE_DEFAULT,
2615  properties,
2616  NULL,
2617  NULL,
2618  &default_error_);
2619  }
2620 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2621  catch (cl::Error &e) {
2622  default_error_ = e.err();
2623  }
2624 #endif
2625  }
2626 
2627 
2633  static void makeDefaultProvided(const Context &c) {
2634  default_ = c;
2635  }
2636 
2637 public:
2638 #ifdef CL_HPP_UNIT_TEST_ENABLE
2639 
2645  static void unitTestClearDefault() {
2646  default_ = Context();
2647  }
2648 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2649 
2654  Context(
2655  const vector<Device>& devices,
2656  cl_context_properties* properties = NULL,
2657  void (CL_CALLBACK * notifyFptr)(
2658  const char *,
2659  const void *,
2660  size_type,
2661  void *) = NULL,
2662  void* data = NULL,
2663  cl_int* err = NULL)
2664  {
2665  cl_int error;
2666 
2667  size_type numDevices = devices.size();
2668  vector<cl_device_id> deviceIDs(numDevices);
2669 
2670  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
2671  deviceIDs[deviceIndex] = (devices[deviceIndex])();
2672  }
2673 
2674  object_ = ::clCreateContext(
2675  properties, (cl_uint) numDevices,
2676  deviceIDs.data(),
2677  notifyFptr, data, &error);
2678 
2679  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2680  if (err != NULL) {
2681  *err = error;
2682  }
2683  }
2684 
2685  Context(
2686  const Device& device,
2687  cl_context_properties* properties = NULL,
2688  void (CL_CALLBACK * notifyFptr)(
2689  const char *,
2690  const void *,
2691  size_type,
2692  void *) = NULL,
2693  void* data = NULL,
2694  cl_int* err = NULL)
2695  {
2696  cl_int error;
2697 
2698  cl_device_id deviceID = device();
2699 
2700  object_ = ::clCreateContext(
2701  properties, 1,
2702  &deviceID,
2703  notifyFptr, data, &error);
2704 
2705  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2706  if (err != NULL) {
2707  *err = error;
2708  }
2709  }
2710 
2715  Context(
2716  cl_device_type type,
2717  cl_context_properties* properties = NULL,
2718  void (CL_CALLBACK * notifyFptr)(
2719  const char *,
2720  const void *,
2721  size_type,
2722  void *) = NULL,
2723  void* data = NULL,
2724  cl_int* err = NULL)
2725  {
2726  cl_int error;
2727 
2728 #if !defined(__APPLE__) && !defined(__MACOS)
2729  cl_context_properties prop[4] = {CL_CONTEXT_PLATFORM, 0, 0, 0 };
2730 
2731  if (properties == NULL) {
2732  // Get a valid platform ID as we cannot send in a blank one
2733  vector<Platform> platforms;
2734  error = Platform::get(&platforms);
2735  if (error != CL_SUCCESS) {
2736  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2737  if (err != NULL) {
2738  *err = error;
2739  }
2740  return;
2741  }
2742 
2743  // Check the platforms we found for a device of our specified type
2744  cl_context_properties platform_id = 0;
2745  for (unsigned int i = 0; i < platforms.size(); i++) {
2746 
2747  vector<Device> devices;
2748 
2749 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2750  try {
2751 #endif
2752 
2753  error = platforms[i].getDevices(type, &devices);
2754 
2755 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2756  } catch (cl::Error& e) {
2757  error = e.err();
2758  }
2759  // Catch here when exceptions are enabled: we don't want to bail out just because the first platform has no devices of the requested type
2760  // We do error checking next anyway, and can throw there if needed
2761 #endif
2762 
2763  // Only squash CL_SUCCESS and CL_DEVICE_NOT_FOUND
2764  if (error != CL_SUCCESS && error != CL_DEVICE_NOT_FOUND) {
2765  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2766  if (err != NULL) {
2767  *err = error;
2768  }
2769  }
2770 
2771  if (devices.size() > 0) {
2772  platform_id = (cl_context_properties)platforms[i]();
2773  break;
2774  }
2775  }
2776 
2777  if (platform_id == 0) {
2778  detail::errHandler(CL_DEVICE_NOT_FOUND, __CREATE_CONTEXT_FROM_TYPE_ERR);
2779  if (err != NULL) {
2780  *err = CL_DEVICE_NOT_FOUND;
2781  }
2782  return;
2783  }
2784 
2785  prop[1] = platform_id;
2786  properties = &prop[0];
2787  }
2788 #endif
2789  object_ = ::clCreateContextFromType(
2790  properties, type, notifyFptr, data, &error);
2791 
2792  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2793  if (err != NULL) {
2794  *err = error;
2795  }
2796  }
2797 
2801  Context(const Context& ctx) : detail::Wrapper<cl_type>(ctx) {}
2802 
2806  Context& operator = (const Context &ctx)
2807  {
2808  detail::Wrapper<cl_type>::operator=(ctx);
2809  return *this;
2810  }
2811 
2815  Context(Context&& ctx) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(ctx)) {}
2816 
2820  Context& operator = (Context &&ctx)
2821  {
2822  detail::Wrapper<cl_type>::operator=(std::move(ctx));
2823  return *this;
2824  }
2825 
2826 
2831  static Context getDefault(cl_int * err = NULL)
2832  {
2833  std::call_once(default_initialized_, makeDefault);
2834  detail::errHandler(default_error_);
2835  if (err != NULL) {
2836  *err = default_error_;
2837  }
2838  return default_;
2839  }
2840 
2848  static Context setDefault(const Context &default_context)
2849  {
2850  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_context));
2851  detail::errHandler(default_error_);
2852  return default_;
2853  }
2854 
2856  Context() : detail::Wrapper<cl_type>() { }
2857 
2863  explicit Context(const cl_context& context, bool retainObject = false) :
2864  detail::Wrapper<cl_type>(context, retainObject) { }
2865 
2871  Context& operator = (const cl_context& rhs)
2872  {
2873  detail::Wrapper<cl_type>::operator=(rhs);
2874  return *this;
2875  }
2876 
2878  template <typename T>
2879  cl_int getInfo(cl_context_info name, T* param) const
2880  {
2881  return detail::errHandler(
2882  detail::getInfo(&::clGetContextInfo, object_, name, param),
2883  __GET_CONTEXT_INFO_ERR);
2884  }
2885 
2887  template <cl_int name> typename
2888  detail::param_traits<detail::cl_context_info, name>::param_type
2889  getInfo(cl_int* err = NULL) const
2890  {
2891  typename detail::param_traits<
2892  detail::cl_context_info, name>::param_type param;
2893  cl_int result = getInfo(name, &param);
2894  if (err != NULL) {
2895  *err = result;
2896  }
2897  return param;
2898  }
2899 
2904  cl_int getSupportedImageFormats(
2905  cl_mem_flags flags,
2906  cl_mem_object_type type,
2907  vector<ImageFormat>* formats) const
2908  {
2909  cl_uint numEntries;
2910 
2911  if (!formats) {
2912  return CL_SUCCESS;
2913  }
2914 
2915  cl_int err = ::clGetSupportedImageFormats(
2916  object_,
2917  flags,
2918  type,
2919  0,
2920  NULL,
2921  &numEntries);
2922  if (err != CL_SUCCESS) {
2923  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
2924  }
2925 
2926  if (numEntries > 0) {
2927  vector<ImageFormat> value(numEntries);
2928  err = ::clGetSupportedImageFormats(
2929  object_,
2930  flags,
2931  type,
2932  numEntries,
2933  (cl_image_format*)value.data(),
2934  NULL);
2935  if (err != CL_SUCCESS) {
2936  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
2937  }
2938 
2939  formats->assign(begin(value), end(value));
2940  }
2941  else {
2942  // If no values are being returned, ensure an empty vector comes back
2943  formats->clear();
2944  }
2945 
2946  return CL_SUCCESS;
2947  }
2948 };
2949 
2950 inline void Device::makeDefault()
2951 {
2952  /* Throwing an exception from a call_once invocation does not do
2953  * what we wish, so we catch it and save the error.
2954  */
2955 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2956  try
2957 #endif
2958  {
2959  cl_int error = 0;
2960 
2961  Context context = Context::getDefault(&error);
2962  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2963 
2964  if (error != CL_SUCCESS) {
2965  default_error_ = error;
2966  }
2967  else {
2968  default_ = context.getInfo<CL_CONTEXT_DEVICES>()[0];
2969  default_error_ = CL_SUCCESS;
2970  }
2971  }
2972 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2973  catch (cl::Error &e) {
2974  default_error_ = e.err();
2975  }
2976 #endif
2977 }
2978 
2979 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Context::default_initialized_;
2980 CL_HPP_DEFINE_STATIC_MEMBER_ Context Context::default_;
2981 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Context::default_error_ = CL_SUCCESS;
2982 
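An illustrative sketch of Context construction (not part of the header), using the error-code style; builds with CL_HPP_ENABLE_EXCEPTIONS would catch cl::Error instead:

    cl_int err = CL_SUCCESS;
    cl::Device device = cl::Device::getDefault(&err);
    cl::Context context(device, nullptr, nullptr, nullptr, &err);
    // The devices bound to the context can be read back with the typed getInfo.
    cl::vector<cl::Device> attached = context.getInfo<CL_CONTEXT_DEVICES>(&err);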
2991 class Event : public detail::Wrapper<cl_event>
2992 {
2993 public:
2995  Event() : detail::Wrapper<cl_type>() { }
2996 
3005  explicit Event(const cl_event& event, bool retainObject = false) :
3006  detail::Wrapper<cl_type>(event, retainObject) { }
3007 
3013  Event& operator = (const cl_event& rhs)
3014  {
3015  detail::Wrapper<cl_type>::operator=(rhs);
3016  return *this;
3017  }
3018 
3020  template <typename T>
3021  cl_int getInfo(cl_event_info name, T* param) const
3022  {
3023  return detail::errHandler(
3024  detail::getInfo(&::clGetEventInfo, object_, name, param),
3025  __GET_EVENT_INFO_ERR);
3026  }
3027 
3029  template <cl_int name> typename
3030  detail::param_traits<detail::cl_event_info, name>::param_type
3031  getInfo(cl_int* err = NULL) const
3032  {
3033  typename detail::param_traits<
3034  detail::cl_event_info, name>::param_type param;
3035  cl_int result = getInfo(name, &param);
3036  if (err != NULL) {
3037  *err = result;
3038  }
3039  return param;
3040  }
3041 
3043  template <typename T>
3044  cl_int getProfilingInfo(cl_profiling_info name, T* param) const
3045  {
3046  return detail::errHandler(detail::getInfo(
3047  &::clGetEventProfilingInfo, object_, name, param),
3048  __GET_EVENT_PROFILE_INFO_ERR);
3049  }
3050 
3052  template <cl_int name> typename
3053  detail::param_traits<detail::cl_profiling_info, name>::param_type
3054  getProfilingInfo(cl_int* err = NULL) const
3055  {
3056  typename detail::param_traits<
3057  detail::cl_profiling_info, name>::param_type param;
3058  cl_int result = getProfilingInfo(name, &param);
3059  if (err != NULL) {
3060  *err = result;
3061  }
3062  return param;
3063  }
3064 
3069  cl_int wait() const
3070  {
3071  return detail::errHandler(
3072  ::clWaitForEvents(1, &object_),
3073  __WAIT_FOR_EVENTS_ERR);
3074  }
3075 
3076 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3077 
3081  cl_int setCallback(
3082  cl_int type,
3083  void (CL_CALLBACK * pfn_notify)(cl_event, cl_int, void *),
3084  void * user_data = NULL)
3085  {
3086  return detail::errHandler(
3087  ::clSetEventCallback(
3088  object_,
3089  type,
3090  pfn_notify,
3091  user_data),
3092  __SET_EVENT_CALLBACK_ERR);
3093  }
3094 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3095 
3100  static cl_int
3101  waitForEvents(const vector<Event>& events)
3102  {
3103  return detail::errHandler(
3104  ::clWaitForEvents(
3105  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3106  __WAIT_FOR_EVENTS_ERR);
3107  }
3108 };
3109 
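An illustrative helper built on Event (not part of the header): it assumes the event comes from an enqueue call on a queue created with CL_QUEUE_PROFILING_ENABLE; otherwise the profiling queries report an error.

    inline cl_ulong elapsedNanoseconds(const cl::Event &ev)
    {
        ev.wait();   // block until the associated command completes
        cl_ulong start = ev.getProfilingInfo<CL_PROFILING_COMMAND_START>();
        cl_ulong end   = ev.getProfilingInfo<CL_PROFILING_COMMAND_END>();
        return end - start;
    }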
3110 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3111 
3115 class UserEvent : public Event
3116 {
3117 public:
3122  UserEvent(
3123  const Context& context,
3124  cl_int * err = NULL)
3125  {
3126  cl_int error;
3127  object_ = ::clCreateUserEvent(
3128  context(),
3129  &error);
3130 
3131  detail::errHandler(error, __CREATE_USER_EVENT_ERR);
3132  if (err != NULL) {
3133  *err = error;
3134  }
3135  }
3136 
3138  UserEvent() : Event() { }
3139 
3144  cl_int setStatus(cl_int status)
3145  {
3146  return detail::errHandler(
3147  ::clSetUserEventStatus(object_,status),
3148  __SET_USER_EVENT_STATUS_ERR);
3149  }
3150 };
3151 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3152 
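An illustrative sketch of gating device work with a UserEvent (not part of the header): commands that list the user event in their wait list do not start until the host sets its status.

    cl::Context context = cl::Context::getDefault();
    cl::UserEvent gate(context);
    // ... enqueue commands whose wait lists include 'gate' ...
    gate.setStatus(CL_COMPLETE);   // releases the gated commands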
3157 inline static cl_int
3158 WaitForEvents(const vector<Event>& events)
3159 {
3160  return detail::errHandler(
3161  ::clWaitForEvents(
3162  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3163  __WAIT_FOR_EVENTS_ERR);
3164 }
3165 
3174 class Memory : public detail::Wrapper<cl_mem>
3175 {
3176 public:
3178  Memory() : detail::Wrapper<cl_type>() { }
3179 
3191  explicit Memory(const cl_mem& memory, bool retainObject) :
3192  detail::Wrapper<cl_type>(memory, retainObject) { }
3193 
3199  Memory& operator = (const cl_mem& rhs)
3200  {
3201  detail::Wrapper<cl_type>::operator=(rhs);
3202  return *this;
3203  }
3204 
3208  Memory(const Memory& mem) : detail::Wrapper<cl_type>(mem) {}
3209 
3213  Memory& operator = (const Memory &mem)
3214  {
3215  detail::Wrapper<cl_type>::operator=(mem);
3216  return *this;
3217  }
3218 
3222  Memory(Memory&& mem) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(mem)) {}
3223 
3227  Memory& operator = (Memory &&mem)
3228  {
3229  detail::Wrapper<cl_type>::operator=(std::move(mem));
3230  return *this;
3231  }
3232 
3233 
3235  template <typename T>
3236  cl_int getInfo(cl_mem_info name, T* param) const
3237  {
3238  return detail::errHandler(
3239  detail::getInfo(&::clGetMemObjectInfo, object_, name, param),
3240  __GET_MEM_OBJECT_INFO_ERR);
3241  }
3242 
3244  template <cl_int name> typename
3245  detail::param_traits<detail::cl_mem_info, name>::param_type
3246  getInfo(cl_int* err = NULL) const
3247  {
3248  typename detail::param_traits<
3249  detail::cl_mem_info, name>::param_type param;
3250  cl_int result = getInfo(name, &param);
3251  if (err != NULL) {
3252  *err = result;
3253  }
3254  return param;
3255  }
3256 
3257 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3258 
3271  cl_int setDestructorCallback(
3272  void (CL_CALLBACK * pfn_notify)(cl_mem, void *),
3273  void * user_data = NULL)
3274  {
3275  return detail::errHandler(
3276  ::clSetMemObjectDestructorCallback(
3277  object_,
3278  pfn_notify,
3279  user_data),
3280  __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR);
3281  }
3282 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3283 
3284 };
3285 
3286 // Pre-declare copy functions
3287 class Buffer;
3288 template< typename IteratorType >
3289 cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3290 template< typename IteratorType >
3291 cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3292 template< typename IteratorType >
3293 cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3294 template< typename IteratorType >
3295 cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3296 
3297 
3298 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3299 namespace detail
3300 {
3301  class SVMTraitNull
3302  {
3303  public:
3304  static cl_svm_mem_flags getSVMMemFlags()
3305  {
3306  return 0;
3307  }
3308  };
3309 } // namespace detail
3310 
3311 template<class Trait = detail::SVMTraitNull>
3312  class SVMTraitReadWrite
3313  {
3314 public:
3315  static cl_svm_mem_flags getSVMMemFlags()
3316  {
3317  return CL_MEM_READ_WRITE |
3318  Trait::getSVMMemFlags();
3319  }
3320 };
3321 
3322 template<class Trait = detail::SVMTraitNull>
3323  class SVMTraitReadOnly
3324  {
3325 public:
3326  static cl_svm_mem_flags getSVMMemFlags()
3327  {
3328  return CL_MEM_READ_ONLY |
3329  Trait::getSVMMemFlags();
3330  }
3331 };
3332 
3333 template<class Trait = detail::SVMTraitNull>
3334  class SVMTraitWriteOnly
3335  {
3336 public:
3337  static cl_svm_mem_flags getSVMMemFlags()
3338  {
3339  return CL_MEM_WRITE_ONLY |
3340  Trait::getSVMMemFlags();
3341  }
3342 };
3343 
3344 template<class Trait = SVMTraitReadWrite<>>
3345  class SVMTraitCoarse
3346  {
3347 public:
3348  static cl_svm_mem_flags getSVMMemFlags()
3349  {
3350  return Trait::getSVMMemFlags();
3351  }
3352 };
3353 
3354 template<class Trait = SVMTraitReadWrite<>>
3355  class SVMTraitFine
3356  {
3357 public:
3358  static cl_svm_mem_flags getSVMMemFlags()
3359  {
3360  return CL_MEM_SVM_FINE_GRAIN_BUFFER |
3361  Trait::getSVMMemFlags();
3362  }
3363 };
3364 
3365 template<class Trait = SVMTraitReadWrite<>>
3366  class SVMTraitAtomic
3367  {
3368 public:
3369  static cl_svm_mem_flags getSVMMemFlags()
3370  {
3371  return
3372  CL_MEM_SVM_FINE_GRAIN_BUFFER |
3373  CL_MEM_SVM_ATOMICS |
3374  Trait::getSVMMemFlags();
3375  }
3376 };
3377 
3378 // Pre-declare SVM map function
3379 template<typename T>
3380 inline cl_int enqueueMapSVM(
3381  T* ptr,
3382  cl_bool blocking,
3383  cl_map_flags flags,
3384  size_type size,
3385  const vector<Event>* events = NULL,
3386  Event* event = NULL);
3387 
3399 template<typename T, class SVMTrait>
3400  class SVMAllocator {
3401  private:
3402  Context context_;
3403 
3404 public:
3405  typedef T value_type;
3406  typedef value_type* pointer;
3407  typedef const value_type* const_pointer;
3408  typedef value_type& reference;
3409  typedef const value_type& const_reference;
3410  typedef std::size_t size_type;
3411  typedef std::ptrdiff_t difference_type;
3412 
3413  template<typename U>
3414  struct rebind
3415  {
3416  typedef SVMAllocator<U, SVMTrait> other;
3417  };
3418 
3419  template<typename U, typename V>
3420  friend class SVMAllocator;
3421 
3422  SVMAllocator() :
3423  context_(Context::getDefault())
3424  {
3425  }
3426 
3427  explicit SVMAllocator(cl::Context context) :
3428  context_(context)
3429  {
3430  }
3431 
3432 
3433  SVMAllocator(const SVMAllocator &other) :
3434  context_(other.context_)
3435  {
3436  }
3437 
3438  template<typename U>
3439  SVMAllocator(const SVMAllocator<U, SVMTrait> &other) :
3440  context_(other.context_)
3441  {
3442  }
3443 
3444  ~SVMAllocator()
3445  {
3446  }
3447 
3448  pointer address(reference r) CL_HPP_NOEXCEPT_
3449  {
3450  return std::addressof(r);
3451  }
3452 
3453  const_pointer address(const_reference r) CL_HPP_NOEXCEPT_
3454  {
3455  return std::addressof(r);
3456  }
3457 
3464  pointer allocate(
3465  size_type size,
3466  typename cl::SVMAllocator<void, SVMTrait>::const_pointer = 0)
3467  {
3468  // Allocate memory with default alignment matching the size of the type
3469  void* voidPointer =
3470  clSVMAlloc(
3471  context_(),
3472  SVMTrait::getSVMMemFlags(),
3473  size*sizeof(T),
3474  0);
3475  pointer retValue = reinterpret_cast<pointer>(
3476  voidPointer);
3477 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3478  if (!retValue) {
3479  std::bad_alloc excep;
3480  throw excep;
3481  }
3482 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3483 
3484  // If allocation was coarse-grained then map it
3485  if (!(SVMTrait::getSVMMemFlags() & CL_MEM_SVM_FINE_GRAIN_BUFFER)) {
3486  cl_int err = enqueueMapSVM(retValue, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE, size*sizeof(T));
3487  if (err != CL_SUCCESS) {
3488  std::bad_alloc excep;
3489  throw excep;
3490  }
3491  }
3492 
3493  // If exceptions disabled, return null pointer from allocator
3494  return retValue;
3495  }
3496 
3497  void deallocate(pointer p, size_type)
3498  {
3499  clSVMFree(context_(), p);
3500  }
3501 
3506  size_type max_size() const CL_HPP_NOEXCEPT_
3507  {
3508  size_type maxSize = std::numeric_limits<size_type>::max() / sizeof(T);
3509 
3510  for (const Device &d : context_.getInfo<CL_CONTEXT_DEVICES>()) {
3511  maxSize = std::min(
3512  maxSize,
3513  static_cast<size_type>(d.getInfo<CL_DEVICE_MAX_MEM_ALLOC_SIZE>()));
3514  }
3515 
3516  return maxSize;
3517  }
3518 
3519  template< class U, class... Args >
3520  void construct(U* p, Args&&... args)
3521  {
3522  new(p)T(args...);
3523  }
3524 
3525  template< class U >
3526  void destroy(U* p)
3527  {
3528  p->~U();
3529  }
3530 
3534  inline bool operator==(SVMAllocator const& rhs)
3535  {
3536  return (context_==rhs.context_);
3537  }
3538 
3539  inline bool operator!=(SVMAllocator const& a)
3540  {
3541  return !operator==(a);
3542  }
3543 }; // class SVMAllocator
3544 
3545 
3546 template<class SVMTrait>
3547 class SVMAllocator<void, SVMTrait> {
3548 public:
3549  typedef void value_type;
3550  typedef value_type* pointer;
3551  typedef const value_type* const_pointer;
3552 
3553  template<typename U>
3554  struct rebind
3555  {
3556  typedef SVMAllocator<U, SVMTrait> other;
3557  };
3558 
3559  template<typename U, typename V>
3560  friend class SVMAllocator;
3561 };
3562 
3563 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3564 namespace detail
3565 {
3566  template<class Alloc>
3567  class Deleter {
3568  private:
3569  Alloc alloc_;
3570  size_type copies_;
3571 
3572  public:
3573  typedef typename std::allocator_traits<Alloc>::pointer pointer;
3574 
3575  Deleter(const Alloc &alloc, size_type copies) : alloc_{ alloc }, copies_{ copies }
3576  {
3577  }
3578 
3579  void operator()(pointer ptr) const {
3580  Alloc tmpAlloc{ alloc_ };
3581  std::allocator_traits<Alloc>::destroy(tmpAlloc, std::addressof(*ptr));
3582  std::allocator_traits<Alloc>::deallocate(tmpAlloc, ptr, copies_);
3583  }
3584  };
3585 } // namespace detail
3586 
3593 template <class T, class Alloc, class... Args>
3594 cl::pointer<T, detail::Deleter<Alloc>> allocate_pointer(const Alloc &alloc_, Args&&... args)
3595 {
3596  Alloc alloc(alloc_);
3597  static const size_type copies = 1;
3598 
3599  // Ensure that creation of the management block and the
3600  // object are dealt with separately such that we only provide a deleter
3601 
3602  T* tmp = std::allocator_traits<Alloc>::allocate(alloc, copies);
3603  if (!tmp) {
3604  std::bad_alloc excep;
3605  throw excep;
3606  }
3607  try {
3608  std::allocator_traits<Alloc>::construct(
3609  alloc,
3610  std::addressof(*tmp),
3611  std::forward<Args>(args)...);
3612 
3613  return cl::pointer<T, detail::Deleter<Alloc>>(tmp, detail::Deleter<Alloc>{alloc, copies});
3614  }
3615  catch (std::bad_alloc &b)
3616  {
3617  std::allocator_traits<Alloc>::deallocate(alloc, tmp, copies);
3618  throw;
3619  }
3620 }
3621 
3622 template< class T, class SVMTrait, class... Args >
3623 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(Args... args)
3624 {
3625  SVMAllocator<T, SVMTrait> alloc;
3626  return cl::allocate_pointer<T>(alloc, args...);
3627 }
3628 
3629 template< class T, class SVMTrait, class... Args >
3630 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(const cl::Context &c, Args... args)
3631 {
3632  SVMAllocator<T, SVMTrait> alloc(c);
3633  return cl::allocate_pointer<T>(alloc, args...);
3634 }
3635 #endif // #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3636 
3640 template < class T >
3641 using coarse_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>>;
3642 
3646 template < class T >
3647 using fine_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitFine<>>>;
3648 
3652 template < class T >
3653 using atomic_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitAtomic<>>>;
3654 
3655 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
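An illustrative sketch of the SVM helpers above (not part of the header; requires an OpenCL 2.0 device reporting coarse-grain buffer SVM, a default context and, for the map-on-allocate step, a default command queue):

    // Coarse-grained SVM vector whose storage is visible to host and device.
    cl::SVMAllocator<int, cl::SVMTraitCoarse<>> svmAlloc;
    cl::coarse_svm_vector<int> numbers(1024, 0, svmAlloc);

    // A single SVM-allocated object owned by a cl::pointer (unique_ptr with an SVM deleter).
    auto counter = cl::allocate_svm<int, cl::SVMTraitCoarse<>>(0);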
3656 
3657 
3664 class Buffer : public Memory
3665 {
3666 public:
3667 
3675  Buffer(
3676  const Context& context,
3677  cl_mem_flags flags,
3678  size_type size,
3679  void* host_ptr = NULL,
3680  cl_int* err = NULL)
3681  {
3682  cl_int error;
3683  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3684 
3685  detail::errHandler(error, __CREATE_BUFFER_ERR);
3686  if (err != NULL) {
3687  *err = error;
3688  }
3689  }
3690 
3700  Buffer(
3701  cl_mem_flags flags,
3702  size_type size,
3703  void* host_ptr = NULL,
3704  cl_int* err = NULL)
3705  {
3706  cl_int error;
3707 
3708  Context context = Context::getDefault(err);
3709 
3710  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3711 
3712  detail::errHandler(error, __CREATE_BUFFER_ERR);
3713  if (err != NULL) {
3714  *err = error;
3715  }
3716  }
3717 
3723  template< typename IteratorType >
3724  Buffer(
3725  IteratorType startIterator,
3726  IteratorType endIterator,
3727  bool readOnly,
3728  bool useHostPtr = false,
3729  cl_int* err = NULL)
3730  {
3731  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
3732  cl_int error;
3733 
3734  cl_mem_flags flags = 0;
3735  if( readOnly ) {
3736  flags |= CL_MEM_READ_ONLY;
3737  }
3738  else {
3739  flags |= CL_MEM_READ_WRITE;
3740  }
3741  if( useHostPtr ) {
3742  flags |= CL_MEM_USE_HOST_PTR;
3743  }
3744 
3745  size_type size = sizeof(DataType)*(endIterator - startIterator);
3746 
3747  Context context = Context::getDefault(err);
3748 
3749  if( useHostPtr ) {
3750  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
3751  } else {
3752  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
3753  }
3754 
3755  detail::errHandler(error, __CREATE_BUFFER_ERR);
3756  if (err != NULL) {
3757  *err = error;
3758  }
3759 
3760  if( !useHostPtr ) {
3761  error = cl::copy(startIterator, endIterator, *this);
3762  detail::errHandler(error, __CREATE_BUFFER_ERR);
3763  if (err != NULL) {
3764  *err = error;
3765  }
3766  }
3767  }
3768 
3774  template< typename IteratorType >
3775  Buffer(const Context &context, IteratorType startIterator, IteratorType endIterator,
3776  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3777 
3782  template< typename IteratorType >
3783  Buffer(const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator,
3784  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3785 
3787  Buffer() : Memory() { }
3788 
3796  explicit Buffer(const cl_mem& buffer, bool retainObject = false) :
3797  Memory(buffer, retainObject) { }
3798 
3803  Buffer& operator = (const cl_mem& rhs)
3804  {
3805  Memory::operator=(rhs);
3806  return *this;
3807  }
3808 
3812  Buffer(const Buffer& buf) : Memory(buf) {}
3813 
3817  Buffer& operator = (const Buffer &buf)
3818  {
3819  Memory::operator=(buf);
3820  return *this;
3821  }
3822 
3826  Buffer(Buffer&& buf) CL_HPP_NOEXCEPT_ : Memory(std::move(buf)) {}
3827 
3831  Buffer& operator = (Buffer &&buf)
3832  {
3833  Memory::operator=(std::move(buf));
3834  return *this;
3835  }
3836 
3837 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3838 
3842  Buffer createSubBuffer(
3843  cl_mem_flags flags,
3844  cl_buffer_create_type buffer_create_type,
3845  const void * buffer_create_info,
3846  cl_int * err = NULL)
3847  {
3848  Buffer result;
3849  cl_int error;
3850  result.object_ = ::clCreateSubBuffer(
3851  object_,
3852  flags,
3853  buffer_create_type,
3854  buffer_create_info,
3855  &error);
3856 
3857  detail::errHandler(error, __CREATE_SUBBUFFER_ERR);
3858  if (err != NULL) {
3859  *err = error;
3860  }
3861 
3862  return result;
3863  }
3864 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3865 };
3866 
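An illustrative sketch of the iterator-based Buffer constructor (not part of the header): with useHostPtr = false the data is copied into a new device allocation via cl::copy on the default command queue; with useHostPtr = true the host storage would instead be wrapped with CL_MEM_USE_HOST_PTR.

    cl::vector<float> hostData(256, 1.0f);
    cl_int err = CL_SUCCESS;
    cl::Buffer input(hostData.begin(), hostData.end(),
                     /*readOnly*/ true, /*useHostPtr*/ false, &err);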
3867 #if defined (CL_HPP_USE_DX_INTEROP)
3868 
3876 class BufferD3D10 : public Buffer
3877 {
3878 public:
3879 
3880 
3886  BufferD3D10(
3887  const Context& context,
3888  cl_mem_flags flags,
3889  ID3D10Buffer* bufobj,
3890  cl_int * err = NULL) : pfn_clCreateFromD3D10BufferKHR(nullptr)
3891  {
3892  typedef CL_API_ENTRY cl_mem (CL_API_CALL *PFN_clCreateFromD3D10BufferKHR)(
3893  cl_context context, cl_mem_flags flags, ID3D10Buffer* buffer,
3894  cl_int* errcode_ret);
3895  PFN_clCreateFromD3D10BufferKHR pfn_clCreateFromD3D10BufferKHR;
3896 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
3897  vector<cl_context_properties> props = context.getInfo<CL_CONTEXT_PROPERTIES>();
3898  cl_platform_id platform = nullptr;
3899  for( int i = 0; i < props.size(); ++i ) {
3900  if( props[i] == CL_CONTEXT_PLATFORM ) {
3901  platform = (cl_platform_id)props[i+1];
3902  }
3903  }
3904  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateFromD3D10BufferKHR);
3905 #elif CL_HPP_TARGET_OPENCL_VERSION >= 110
3906  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateFromD3D10BufferKHR);
3907 #endif
3908 
3909  cl_int error;
3910  object_ = pfn_clCreateFromD3D10BufferKHR(
3911  context(),
3912  flags,
3913  bufobj,
3914  &error);
3915 
3916  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
3917  if (err != NULL) {
3918  *err = error;
3919  }
3920  }
3921 
3923  BufferD3D10() : Buffer() { }
3924 
3932  explicit BufferD3D10(const cl_mem& buffer, bool retainObject = false) :
3933  Buffer(buffer, retainObject) { }
3934 
3939  BufferD3D10& operator = (const cl_mem& rhs)
3940  {
3941  Buffer::operator=(rhs);
3942  return *this;
3943  }
3944 
3948  BufferD3D10(const BufferD3D10& buf) :
3949  Buffer(buf) {}
3950 
3954  BufferD3D10& operator = (const BufferD3D10 &buf)
3955  {
3956  Buffer::operator=(buf);
3957  return *this;
3958  }
3959 
3963  BufferD3D10(BufferD3D10&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
3964 
3968  BufferD3D10& operator = (BufferD3D10 &&buf)
3969  {
3970  Buffer::operator=(std::move(buf));
3971  return *this;
3972  }
3973 };
3974 #endif
3975 
3984 class BufferGL : public Buffer
3985 {
3986 public:
3992  BufferGL(
3993  const Context& context,
3994  cl_mem_flags flags,
3995  cl_GLuint bufobj,
3996  cl_int * err = NULL)
3997  {
3998  cl_int error;
3999  object_ = ::clCreateFromGLBuffer(
4000  context(),
4001  flags,
4002  bufobj,
4003  &error);
4004 
4005  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
4006  if (err != NULL) {
4007  *err = error;
4008  }
4009  }
4010 
4012  BufferGL() : Buffer() { }
4013 
4021  explicit BufferGL(const cl_mem& buffer, bool retainObject = false) :
4022  Buffer(buffer, retainObject) { }
4023 
4028  BufferGL& operator = (const cl_mem& rhs)
4029  {
4030  Buffer::operator=(rhs);
4031  return *this;
4032  }
4033 
4037  BufferGL(const BufferGL& buf) : Buffer(buf) {}
4038 
4042  BufferGL& operator = (const BufferGL &buf)
4043  {
4044  Buffer::operator=(buf);
4045  return *this;
4046  }
4047 
4051  BufferGL(BufferGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4052 
4056  BufferGL& operator = (BufferGL &&buf)
4057  {
4058  Buffer::operator=(std::move(buf));
4059  return *this;
4060  }
4061 
4063  cl_int getObjectInfo(
4064  cl_gl_object_type *type,
4065  cl_GLuint * gl_object_name)
4066  {
4067  return detail::errHandler(
4068  ::clGetGLObjectInfo(object_,type,gl_object_name),
4069  __GET_GL_OBJECT_INFO_ERR);
4070  }
4071 };
4072 
4081 class BufferRenderGL : public Buffer
4082 {
4083 public:
4089  BufferRenderGL(
4090  const Context& context,
4091  cl_mem_flags flags,
4092  cl_GLuint bufobj,
4093  cl_int * err = NULL)
4094  {
4095  cl_int error;
4096  object_ = ::clCreateFromGLRenderbuffer(
4097  context(),
4098  flags,
4099  bufobj,
4100  &error);
4101 
4102  detail::errHandler(error, __CREATE_GL_RENDER_BUFFER_ERR);
4103  if (err != NULL) {
4104  *err = error;
4105  }
4106  }
4107 
4110  BufferRenderGL() : Buffer() { }
4118  explicit BufferRenderGL(const cl_mem& buffer, bool retainObject = false) :
4119  Buffer(buffer, retainObject) { }
4120 
4125  BufferRenderGL& operator = (const cl_mem& rhs)
4126  {
4127  Buffer::operator=(rhs);
4128  return *this;
4129  }
4130 
4134  BufferRenderGL(const BufferRenderGL& buf) : Buffer(buf) {}
4135 
4139  BufferRenderGL& operator = (const BufferRenderGL &buf)
4140  {
4141  Buffer::operator=(buf);
4142  return *this;
4143  }
4144 
4148  BufferRenderGL(BufferRenderGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4149 
4153  BufferRenderGL& operator = (BufferRenderGL &&buf)
4154  {
4155  Buffer::operator=(std::move(buf));
4156  return *this;
4157  }
4158 
4160  cl_int getObjectInfo(
4161  cl_gl_object_type *type,
4162  cl_GLuint * gl_object_name)
4163  {
4164  return detail::errHandler(
4165  ::clGetGLObjectInfo(object_,type,gl_object_name),
4166  __GET_GL_OBJECT_INFO_ERR);
4167  }
4168 };
4169 
4176 class Image : public Memory
4177 {
4178 protected:
4180  Image() : Memory() { }
4181 
4189  explicit Image(const cl_mem& image, bool retainObject = false) :
4190  Memory(image, retainObject) { }
4191 
4196  Image& operator = (const cl_mem& rhs)
4197  {
4198  Memory::operator=(rhs);
4199  return *this;
4200  }
4201 
4205  Image(const Image& img) : Memory(img) {}
4206 
4210  Image& operator = (const Image &img)
4211  {
4212  Memory::operator=(img);
4213  return *this;
4214  }
4215 
4219  Image(Image&& img) CL_HPP_NOEXCEPT_ : Memory(std::move(img)) {}
4220 
4224  Image& operator = (Image &&img)
4225  {
4226  Memory::operator=(std::move(img));
4227  return *this;
4228  }
4229 
4230 
4231 public:
4233  template <typename T>
4234  cl_int getImageInfo(cl_image_info name, T* param) const
4235  {
4236  return detail::errHandler(
4237  detail::getInfo(&::clGetImageInfo, object_, name, param),
4238  __GET_IMAGE_INFO_ERR);
4239  }
4240 
4242  template <cl_int name> typename
4243  detail::param_traits<detail::cl_image_info, name>::param_type
4244  getImageInfo(cl_int* err = NULL) const
4245  {
4246  typename detail::param_traits<
4247  detail::cl_image_info, name>::param_type param;
4248  cl_int result = getImageInfo(name, &param);
4249  if (err != NULL) {
4250  *err = result;
4251  }
4252  return param;
4253  }
4254 };
4255 
4256 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4257 
4263 class Image1D : public Image
4264 {
4265 public:
4270  Image1D(
4271  const Context& context,
4272  cl_mem_flags flags,
4273  ImageFormat format,
4274  size_type width,
4275  void* host_ptr = NULL,
4276  cl_int* err = NULL)
4277  {
4278  cl_int error;
4279  cl_image_desc desc =
4280  {
4281  CL_MEM_OBJECT_IMAGE1D,
4282  width,
4283  0, 0, 0, 0, 0, 0, 0, 0
4284  };
4285  object_ = ::clCreateImage(
4286  context(),
4287  flags,
4288  &format,
4289  &desc,
4290  host_ptr,
4291  &error);
4292 
4293  detail::errHandler(error, __CREATE_IMAGE_ERR);
4294  if (err != NULL) {
4295  *err = error;
4296  }
4297  }
4298 
4300  Image1D() { }
4301 
4309  explicit Image1D(const cl_mem& image1D, bool retainObject = false) :
4310  Image(image1D, retainObject) { }
4311 
4316  Image1D& operator = (const cl_mem& rhs)
4317  {
4318  Image::operator=(rhs);
4319  return *this;
4320  }
4321 
4325  Image1D(const Image1D& img) : Image(img) {}
4326 
4330  Image1D& operator = (const Image1D &img)
4331  {
4332  Image::operator=(img);
4333  return *this;
4334  }
4335 
4339  Image1D(Image1D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4340 
4344  Image1D& operator = (Image1D &&img)
4345  {
4346  Image::operator=(std::move(img));
4347  return *this;
4348  }
4349 
4350 };
4351 
4355 class Image1DBuffer : public Image
4356 {
4357 public:
4358  Image1DBuffer(
4359  const Context& context,
4360  cl_mem_flags flags,
4361  ImageFormat format,
4362  size_type width,
4363  const Buffer &buffer,
4364  cl_int* err = NULL)
4365  {
4366  cl_int error;
4367  cl_image_desc desc =
4368  {
4369  CL_MEM_OBJECT_IMAGE1D_BUFFER,
4370  width,
4371  0, 0, 0, 0, 0, 0, 0,
4372  buffer()
4373  };
4374  object_ = ::clCreateImage(
4375  context(),
4376  flags,
4377  &format,
4378  &desc,
4379  NULL,
4380  &error);
4381 
4382  detail::errHandler(error, __CREATE_IMAGE_ERR);
4383  if (err != NULL) {
4384  *err = error;
4385  }
4386  }
4387 
4388  Image1DBuffer() { }
4389 
4397  explicit Image1DBuffer(const cl_mem& image1D, bool retainObject = false) :
4398  Image(image1D, retainObject) { }
4399 
4400  Image1DBuffer& operator = (const cl_mem& rhs)
4401  {
4402  Image::operator=(rhs);
4403  return *this;
4404  }
4405 
4409  Image1DBuffer(const Image1DBuffer& img) : Image(img) {}
4410 
4414  Image1DBuffer& operator = (const Image1DBuffer &img)
4415  {
4416  Image::operator=(img);
4417  return *this;
4418  }
4419 
4423  Image1DBuffer(Image1DBuffer&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4424 
4428  Image1DBuffer& operator = (Image1DBuffer &&img)
4429  {
4430  Image::operator=(std::move(img));
4431  return *this;
4432  }
4433 
4434 };
4435 
4439 class Image1DArray : public Image
4440 {
4441 public:
4442  Image1DArray(
4443  const Context& context,
4444  cl_mem_flags flags,
4445  ImageFormat format,
4446  size_type arraySize,
4447  size_type width,
4448  size_type rowPitch,
4449  void* host_ptr = NULL,
4450  cl_int* err = NULL)
4451  {
4452  cl_int error;
4453  cl_image_desc desc =
4454  {
4455  CL_MEM_OBJECT_IMAGE1D_ARRAY,
4456  width,
4457  0, 0, // height, depth (unused)
4458  arraySize,
4459  rowPitch,
4460  0, 0, 0, 0
4461  };
4462  object_ = ::clCreateImage(
4463  context(),
4464  flags,
4465  &format,
4466  &desc,
4467  host_ptr,
4468  &error);
4469 
4470  detail::errHandler(error, __CREATE_IMAGE_ERR);
4471  if (err != NULL) {
4472  *err = error;
4473  }
4474  }
4475 
4476  Image1DArray() { }
4477 
4485  explicit Image1DArray(const cl_mem& imageArray, bool retainObject = false) :
4486  Image(imageArray, retainObject) { }
4487 
4488 
4489  Image1DArray& operator = (const cl_mem& rhs)
4490  {
4491  Image::operator=(rhs);
4492  return *this;
4493  }
4494 
4498  Image1DArray(const Image1DArray& img) : Image(img) {}
4499 
4503  Image1DArray& operator = (const Image1DArray &img)
4504  {
4505  Image::operator=(img);
4506  return *this;
4507  }
4508 
4512  Image1DArray(Image1DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4513 
4517  Image1DArray& operator = (Image1DArray &&img)
4518  {
4519  Image::operator=(std::move(img));
4520  return *this;
4521  }
4522 
4523 };
4524 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4525 
4526 
4533 class Image2D : public Image
4534 {
4535 public:
4540  Image2D(
4541  const Context& context,
4542  cl_mem_flags flags,
4543  ImageFormat format,
4544  size_type width,
4545  size_type height,
4546  size_type row_pitch = 0,
4547  void* host_ptr = NULL,
4548  cl_int* err = NULL)
4549  {
4550  cl_int error;
4551  bool useCreateImage;
4552 
4553 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4554  // Run-time decision based on the actual platform
4555  {
4556  cl_uint version = detail::getContextPlatformVersion(context());
4557  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4558  }
4559 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4560  useCreateImage = true;
4561 #else
4562  useCreateImage = false;
4563 #endif
4564 
4565 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4566  if (useCreateImage)
4567  {
4568  cl_image_desc desc =
4569  {
4570  CL_MEM_OBJECT_IMAGE2D,
4571  width,
4572  height,
4573  0, 0, // depth, array size (unused)
4574  row_pitch,
4575  0, 0, 0, 0
4576  };
4577  object_ = ::clCreateImage(
4578  context(),
4579  flags,
4580  &format,
4581  &desc,
4582  host_ptr,
4583  &error);
4584 
4585  detail::errHandler(error, __CREATE_IMAGE_ERR);
4586  if (err != NULL) {
4587  *err = error;
4588  }
4589  }
4590 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
4591 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
4592  if (!useCreateImage)
4593  {
4594  object_ = ::clCreateImage2D(
4595  context(), flags,&format, width, height, row_pitch, host_ptr, &error);
4596 
4597  detail::errHandler(error, __CREATE_IMAGE2D_ERR);
4598  if (err != NULL) {
4599  *err = error;
4600  }
4601  }
4602 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
4603  }
4604 
4605 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4606 
4611  Image2D(
4612  const Context& context,
4613  ImageFormat format,
4614  const Buffer &sourceBuffer,
4615  size_type width,
4616  size_type height,
4617  size_type row_pitch = 0,
4618  cl_int* err = nullptr)
4619  {
4620  cl_int error;
4621 
4622  cl_image_desc desc =
4623  {
4624  CL_MEM_OBJECT_IMAGE2D,
4625  width,
4626  height,
4627  0, 0, // depth, array size (unused)
4628  row_pitch,
4629  0, 0, 0,
4630  // Use buffer as input to image
4631  sourceBuffer()
4632  };
4633  object_ = ::clCreateImage(
4634  context(),
4635  0, // flags inherited from buffer
4636  &format,
4637  &desc,
4638  nullptr,
4639  &error);
4640 
4641  detail::errHandler(error, __CREATE_IMAGE_ERR);
4642  if (err != nullptr) {
4643  *err = error;
4644  }
4645  }
4646 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4647 
4648 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
4649 
4661  Image2D(
4662  const Context& context,
4663  cl_channel_order order,
4664  const Image &sourceImage,
4665  cl_int* err = nullptr)
4666  {
4667  cl_int error;
4668 
4669  // Descriptor fields have to match source image
4670  size_type sourceWidth =
4671  sourceImage.getImageInfo<CL_IMAGE_WIDTH>();
4672  size_type sourceHeight =
4673  sourceImage.getImageInfo<CL_IMAGE_HEIGHT>();
4674  size_type sourceRowPitch =
4675  sourceImage.getImageInfo<CL_IMAGE_ROW_PITCH>();
4676  cl_uint sourceNumMIPLevels =
4677  sourceImage.getImageInfo<CL_IMAGE_NUM_MIP_LEVELS>();
4678  cl_uint sourceNumSamples =
4679  sourceImage.getImageInfo<CL_IMAGE_NUM_SAMPLES>();
4680  cl_image_format sourceFormat =
4681  sourceImage.getImageInfo<CL_IMAGE_FORMAT>();
4682 
4683  // Update only the channel order.
4684  // Channel format inherited from source.
4685  sourceFormat.image_channel_order = order;
4686  cl_image_desc desc =
4687  {
4688  CL_MEM_OBJECT_IMAGE2D,
4689  sourceWidth,
4690  sourceHeight,
4691  0, 0, // depth (unused), array size (unused)
4692  sourceRowPitch,
4693  0, // slice pitch (unused)
4694  sourceNumMIPLevels,
4695  sourceNumSamples,
4696  // Use the source image as the mem_object from which the new image is created
4697  sourceImage()
4698  };
4699  object_ = ::clCreateImage(
4700  context(),
4701  0, // flags should be inherited from mem_object
4702  &sourceFormat,
4703  &desc,
4704  nullptr,
4705  &error);
4706 
4707  detail::errHandler(error, __CREATE_IMAGE_ERR);
4708  if (err != nullptr) {
4709  *err = error;
4710  }
4711  }
4712 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
4713 
4715  Image2D() { }
4716 
4724  explicit Image2D(const cl_mem& image2D, bool retainObject = false) :
4725  Image(image2D, retainObject) { }
4726 
4731  Image2D& operator = (const cl_mem& rhs)
4732  {
4733  Image::operator=(rhs);
4734  return *this;
4735  }
4736 
4740  Image2D(const Image2D& img) : Image(img) {}
4741 
4745  Image2D& operator = (const Image2D &img)
4746  {
4747  Image::operator=(img);
4748  return *this;
4749  }
4750 
4754  Image2D(Image2D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4755 
4759  Image2D& operator = (Image2D &&img)
4760  {
4761  Image::operator=(std::move(img));
4762  return *this;
4763  }
4764 
4765 };
4766 
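An illustrative sketch of creating an Image2D from host pixels (not part of the header), combining ImageFormat with the width/height constructor above:

    cl_int err = CL_SUCCESS;
    cl::Context context = cl::Context::getDefault(&err);
    cl::vector<cl_uchar> pixels(512 * 512 * 4, 0);          // RGBA8 host data
    cl::ImageFormat format(CL_RGBA, CL_UNORM_INT8);
    cl::Image2D image(context, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
                      format, 512, 512, /*row_pitch*/ 0, pixels.data(), &err);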
4767 
4768 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
4769 
4778 class CL_EXT_PREFIX__VERSION_1_1_DEPRECATED Image2DGL : public Image2D
4779 {
4780 public:
4786  Image2DGL(
4787  const Context& context,
4788  cl_mem_flags flags,
4789  cl_GLenum target,
4790  cl_GLint miplevel,
4791  cl_GLuint texobj,
4792  cl_int * err = NULL)
4793  {
4794  cl_int error;
4795  object_ = ::clCreateFromGLTexture2D(
4796  context(),
4797  flags,
4798  target,
4799  miplevel,
4800  texobj,
4801  &error);
4802 
4803  detail::errHandler(error, __CREATE_GL_TEXTURE_2D_ERR);
4804  if (err != NULL) {
4805  *err = error;
4806  }
4807 
4808  }
4809 
4811  Image2DGL() : Image2D() { }
4812 
4820  explicit Image2DGL(const cl_mem& image, bool retainObject = false) :
4821  Image2D(image, retainObject) { }
4822 
4827  Image2DGL& operator = (const cl_mem& rhs)
4828  {
4829  Image2D::operator=(rhs);
4830  return *this;
4831  }
4832 
4836  Image2DGL(const Image2DGL& img) : Image2D(img) {}
4837 
4841  Image2DGL& operator = (const Image2DGL &img)
4842  {
4843  Image2D::operator=(img);
4844  return *this;
4845  }
4846 
4850  Image2DGL(Image2DGL&& img) CL_HPP_NOEXCEPT_ : Image2D(std::move(img)) {}
4851 
4855  Image2DGL& operator = (Image2DGL &&img)
4856  {
4857  Image2D::operator=(std::move(img));
4858  return *this;
4859  }
4860 
4861 } CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
4862 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
4863 
4864 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4865 
4868 class Image2DArray : public Image
4869 {
4870 public:
4871  Image2DArray(
4872  const Context& context,
4873  cl_mem_flags flags,
4874  ImageFormat format,
4875  size_type arraySize,
4876  size_type width,
4877  size_type height,
4878  size_type rowPitch,
4879  size_type slicePitch,
4880  void* host_ptr = NULL,
4881  cl_int* err = NULL)
4882  {
4883  cl_int error;
4884  cl_image_desc desc =
4885  {
4886  CL_MEM_OBJECT_IMAGE2D_ARRAY,
4887  width,
4888  height,
4889  0, // depth (unused)
4890  arraySize,
4891  rowPitch,
4892  slicePitch,
4893  0, 0, 0
4894  };
4895  object_ = ::clCreateImage(
4896  context(),
4897  flags,
4898  &format,
4899  &desc,
4900  host_ptr,
4901  &error);
4902 
4903  detail::errHandler(error, __CREATE_IMAGE_ERR);
4904  if (err != NULL) {
4905  *err = error;
4906  }
4907  }
4908 
4909  Image2DArray() { }
4910 
4918  explicit Image2DArray(const cl_mem& imageArray, bool retainObject = false) : Image(imageArray, retainObject) { }
4919 
4920  Image2DArray& operator = (const cl_mem& rhs)
4921  {
4922  Image::operator=(rhs);
4923  return *this;
4924  }
4925 
4929  Image2DArray(const Image2DArray& img) : Image(img) {}
4930 
4934  Image2DArray& operator = (const Image2DArray &img)
4935  {
4936  Image::operator=(img);
4937  return *this;
4938  }
4939 
4943  Image2DArray(Image2DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4944 
4948  Image2DArray& operator = (Image2DArray &&img)
4949  {
4950  Image::operator=(std::move(img));
4951  return *this;
4952  }
4953 };
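// Illustrative usage sketch (not part of cl2.hpp): allocating a 4-slice RGBA
// image array with tightly packed rows and slices (pitches of 0) and no host
// data. Assumes 'context' targets an OpenCL 1.2+ platform; the slice count,
// format and flags are example values only.
inline cl::Image2DArray makeExampleImage2DArray(const cl::Context &context,
                                                cl::size_type width,
                                                cl::size_type height,
                                                cl_int *err = NULL)
{
    cl::ImageFormat format(CL_RGBA, CL_UNORM_INT8);
    return cl::Image2DArray(context, CL_MEM_READ_WRITE, format,
                            4,        // array size (number of slices)
                            width, height,
                            0, 0,     // row pitch, slice pitch
                            NULL,     // no host pointer
                            err);
}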
4954 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4955 
4962 class Image3D : public Image
4963 {
4964 public:
4969  Image3D(
4970  const Context& context,
4971  cl_mem_flags flags,
4972  ImageFormat format,
4973  size_type width,
4974  size_type height,
4975  size_type depth,
4976  size_type row_pitch = 0,
4977  size_type slice_pitch = 0,
4978  void* host_ptr = NULL,
4979  cl_int* err = NULL)
4980  {
4981  cl_int error;
4982  bool useCreateImage;
4983 
4984 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4985  // Run-time decision based on the actual platform
4986  {
4987  cl_uint version = detail::getContextPlatformVersion(context());
4988  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4989  }
4990 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4991  useCreateImage = true;
4992 #else
4993  useCreateImage = false;
4994 #endif
4995 
4996 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4997  if (useCreateImage)
4998  {
4999  cl_image_desc desc =
5000  {
5001  CL_MEM_OBJECT_IMAGE3D,
5002  width,
5003  height,
5004  depth,
5005  0, // array size (unused)
5006  row_pitch,
5007  slice_pitch,
5008  0, 0, 0
5009  };
5010  object_ = ::clCreateImage(
5011  context(),
5012  flags,
5013  &format,
5014  &desc,
5015  host_ptr,
5016  &error);
5017 
5018  detail::errHandler(error, __CREATE_IMAGE_ERR);
5019  if (err != NULL) {
5020  *err = error;
5021  }
5022  }
5023 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5024 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
5025  if (!useCreateImage)
5026  {
5027  object_ = ::clCreateImage3D(
5028  context(), flags, &format, width, height, depth, row_pitch,
5029  slice_pitch, host_ptr, &error);
5030 
5031  detail::errHandler(error, __CREATE_IMAGE3D_ERR);
5032  if (err != NULL) {
5033  *err = error;
5034  }
5035  }
5036 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
5037  }
5038 
5040  Image3D() : Image() { }
5041 
5049  explicit Image3D(const cl_mem& image3D, bool retainObject = false) :
5050  Image(image3D, retainObject) { }
5051 
5056  Image3D& operator = (const cl_mem& rhs)
5057  {
5058  Image::operator=(rhs);
5059  return *this;
5060  }
5061 
5065  Image3D(const Image3D& img) : Image(img) {}
5066 
5070  Image3D& operator = (const Image3D &img)
5071  {
5072  Image::operator=(img);
5073  return *this;
5074  }
5075 
5079  Image3D(Image3D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5080 
5084  Image3D& operator = (Image3D &&img)
5085  {
5086  Image::operator=(std::move(img));
5087  return *this;
5088  }
5089 };
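// Illustrative usage sketch (not part of cl2.hpp): allocating a small
// single-channel float volume on the device. 'context' is assumed valid and
// the 64^3 dimensions are example values; pitches of 0 mean tightly packed.
inline cl::Image3D makeExampleVolume(const cl::Context &context, cl_int *err = NULL)
{
    cl::ImageFormat format(CL_R, CL_FLOAT);
    return cl::Image3D(context, CL_MEM_READ_WRITE, format,
                       64, 64, 64,  // width, height, depth
                       0, 0,        // row pitch, slice pitch
                       NULL, err);
}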
5090 
5091 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
5092 
5100 class Image3DGL : public Image3D
5101 {
5102 public:
5108  Image3DGL(
5109  const Context& context,
5110  cl_mem_flags flags,
5111  cl_GLenum target,
5112  cl_GLint miplevel,
5113  cl_GLuint texobj,
5114  cl_int * err = NULL)
5115  {
5116  cl_int error;
5117  object_ = ::clCreateFromGLTexture3D(
5118  context(),
5119  flags,
5120  target,
5121  miplevel,
5122  texobj,
5123  &error);
5124 
5125  detail::errHandler(error, __CREATE_GL_TEXTURE_3D_ERR);
5126  if (err != NULL) {
5127  *err = error;
5128  }
5129  }
5130 
5132  Image3DGL() : Image3D() { }
5133 
5141  explicit Image3DGL(const cl_mem& image, bool retainObject = false) :
5142  Image3D(image, retainObject) { }
5143 
5148  Image3DGL& operator = (const cl_mem& rhs)
5149  {
5150  Image3D::operator=(rhs);
5151  return *this;
5152  }
5153 
5157  Image3DGL(const Image3DGL& img) : Image3D(img) {}
5158 
5162  Image3DGL& operator = (const Image3DGL &img)
5163  {
5164  Image3D::operator=(img);
5165  return *this;
5166  }
5167 
5171  Image3DGL(Image3DGL&& img) CL_HPP_NOEXCEPT_ : Image3D(std::move(img)) {}
5172 
5176  Image3DGL& operator = (Image3DGL &&img)
5177  {
5178  Image3D::operator=(std::move(img));
5179  return *this;
5180  }
5181 };
5182 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5183 
5184 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5185 
5191 class ImageGL : public Image
5192 {
5193 public:
5194  ImageGL(
5195  const Context& context,
5196  cl_mem_flags flags,
5197  cl_GLenum target,
5198  cl_GLint miplevel,
5199  cl_GLuint texobj,
5200  cl_int * err = NULL)
5201  {
5202  cl_int error;
5203  object_ = ::clCreateFromGLTexture(
5204  context(),
5205  flags,
5206  target,
5207  miplevel,
5208  texobj,
5209  &error);
5210 
5211  detail::errHandler(error, __CREATE_GL_TEXTURE_ERR);
5212  if (err != NULL) {
5213  *err = error;
5214  }
5215  }
5216 
5217  ImageGL() : Image() { }
5218 
5226  explicit ImageGL(const cl_mem& image, bool retainObject = false) :
5227  Image(image, retainObject) { }
5228 
5229  ImageGL& operator = (const cl_mem& rhs)
5230  {
5231  Image::operator=(rhs);
5232  return *this;
5233  }
5234 
5238  ImageGL(const ImageGL& img) : Image(img) {}
5239 
5243  ImageGL& operator = (const ImageGL &img)
5244  {
5245  Image::operator=(img);
5246  return *this;
5247  }
5248 
5252  ImageGL(ImageGL&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5253 
5257  ImageGL& operator = (ImageGL &&img)
5258  {
5259  Image::operator=(std::move(img));
5260  return *this;
5261  }
5262 };
5263 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5264 
5265 
5266 
5267 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5268 
5274 class Pipe : public Memory
5275 {
5276 public:
5277 
5287  Pipe(
5288  const Context& context,
5289  cl_uint packet_size,
5290  cl_uint max_packets,
5291  cl_int* err = NULL)
5292  {
5293  cl_int error;
5294 
5295  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5296  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5297 
5298  detail::errHandler(error, __CREATE_PIPE_ERR);
5299  if (err != NULL) {
5300  *err = error;
5301  }
5302  }
5303 
5312  Pipe(
5313  cl_uint packet_size,
5314  cl_uint max_packets,
5315  cl_int* err = NULL)
5316  {
5317  cl_int error;
5318 
5319  Context context = Context::getDefault(err);
5320 
5321  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5322  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5323 
5324  detail::errHandler(error, __CREATE_PIPE_ERR);
5325  if (err != NULL) {
5326  *err = error;
5327  }
5328  }
5329 
5331  Pipe() : Memory() { }
5332 
5340  explicit Pipe(const cl_mem& pipe, bool retainObject = false) :
5341  Memory(pipe, retainObject) { }
5342 
5347  Pipe& operator = (const cl_mem& rhs)
5348  {
5349  Memory::operator=(rhs);
5350  return *this;
5351  }
5352 
5356  Pipe(const Pipe& pipe) : Memory(pipe) {}
5357 
5361  Pipe& operator = (const Pipe &pipe)
5362  {
5363  Memory::operator=(pipe);
5364  return *this;
5365  }
5366 
5370  Pipe(Pipe&& pipe) CL_HPP_NOEXCEPT_ : Memory(std::move(pipe)) {}
5371 
5375  Pipe& operator = (Pipe &&pipe)
5376  {
5377  Memory::operator=(std::move(pipe));
5378  return *this;
5379  }
5380 
5382  template <typename T>
5383  cl_int getInfo(cl_pipe_info name, T* param) const
5384  {
5385  return detail::errHandler(
5386  detail::getInfo(&::clGetPipeInfo, object_, name, param),
5387  __GET_PIPE_INFO_ERR);
5388  }
5389 
5391  template <cl_int name> typename
5392  detail::param_traits<detail::cl_pipe_info, name>::param_type
5393  getInfo(cl_int* err = NULL) const
5394  {
5395  typename detail::param_traits<
5396  detail::cl_pipe_info, name>::param_type param;
5397  cl_int result = getInfo(name, &param);
5398  if (err != NULL) {
5399  *err = result;
5400  }
5401  return param;
5402  }
5403 }; // class Pipe
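// Illustrative usage sketch (not part of cl2.hpp): creating a pipe that holds
// up to 1024 packets of 16 bytes and reading the packet size back through the
// templated getInfo() above. Assumes an OpenCL 2.0 context; the sizes are
// example values only.
inline cl_uint examplePipePacketSize(const cl::Context &context, cl_int *err = NULL)
{
    cl::Pipe pipe(context,
                  16,    // packet size in bytes
                  1024,  // maximum number of packets
                  err);
    return pipe.getInfo<CL_PIPE_PACKET_SIZE>(err);
}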
5404 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
5405 
5406 
5415 class Sampler : public detail::Wrapper<cl_sampler>
5416 {
5417 public:
5419  Sampler() { }
5420 
5425  Sampler(
5426  const Context& context,
5427  cl_bool normalized_coords,
5428  cl_addressing_mode addressing_mode,
5429  cl_filter_mode filter_mode,
5430  cl_int* err = NULL)
5431  {
5432  cl_int error;
5433 
5434 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5435  cl_sampler_properties sampler_properties[] = {
5436  CL_SAMPLER_NORMALIZED_COORDS, normalized_coords,
5437  CL_SAMPLER_ADDRESSING_MODE, addressing_mode,
5438  CL_SAMPLER_FILTER_MODE, filter_mode,
5439  0 };
5440  object_ = ::clCreateSamplerWithProperties(
5441  context(),
5442  sampler_properties,
5443  &error);
5444 
5445  detail::errHandler(error, __CREATE_SAMPLER_WITH_PROPERTIES_ERR);
5446  if (err != NULL) {
5447  *err = error;
5448  }
5449 #else
5450  object_ = ::clCreateSampler(
5451  context(),
5452  normalized_coords,
5453  addressing_mode,
5454  filter_mode,
5455  &error);
5456 
5457  detail::errHandler(error, __CREATE_SAMPLER_ERR);
5458  if (err != NULL) {
5459  *err = error;
5460  }
5461 #endif
5462  }
5463 
5472  explicit Sampler(const cl_sampler& sampler, bool retainObject = false) :
5473  detail::Wrapper<cl_type>(sampler, retainObject) { }
5474 
5480  Sampler& operator = (const cl_sampler& rhs)
5481  {
5482  detail::Wrapper<cl_type>::operator=(rhs);
5483  return *this;
5484  }
5485 
5489  Sampler(const Sampler& sam) : detail::Wrapper<cl_type>(sam) {}
5490 
5494  Sampler& operator = (const Sampler &sam)
5495  {
5496  detail::Wrapper<cl_type>::operator=(sam);
5497  return *this;
5498  }
5499 
5503  Sampler(Sampler&& sam) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(sam)) {}
5504 
5508  Sampler& operator = (Sampler &&sam)
5509  {
5510  detail::Wrapper<cl_type>::operator=(std::move(sam));
5511  return *this;
5512  }
5513 
5515  template <typename T>
5516  cl_int getInfo(cl_sampler_info name, T* param) const
5517  {
5518  return detail::errHandler(
5519  detail::getInfo(&::clGetSamplerInfo, object_, name, param),
5520  __GET_SAMPLER_INFO_ERR);
5521  }
5522 
5524  template <cl_int name> typename
5525  detail::param_traits<detail::cl_sampler_info, name>::param_type
5526  getInfo(cl_int* err = NULL) const
5527  {
5528  typename detail::param_traits<
5529  detail::cl_sampler_info, name>::param_type param;
5530  cl_int result = getInfo(name, &param);
5531  if (err != NULL) {
5532  *err = result;
5533  }
5534  return param;
5535  }
5536 };
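// Illustrative usage sketch (not part of cl2.hpp): a sampler using
// unnormalized coordinates, clamp-to-edge addressing and nearest filtering,
// with the filter mode queried back through getInfo(). 'context' is assumed
// valid.
inline cl::Sampler makeExampleSampler(const cl::Context &context, cl_int *err = NULL)
{
    cl::Sampler sampler(context,
                        CL_FALSE,                 // normalized_coords
                        CL_ADDRESS_CLAMP_TO_EDGE, // addressing_mode
                        CL_FILTER_NEAREST,        // filter_mode
                        err);
    cl_filter_mode mode = sampler.getInfo<CL_SAMPLER_FILTER_MODE>();
    (void)mode; // CL_FILTER_NEAREST on success
    return sampler;
}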
5537 
5538 class Program;
5539 class CommandQueue;
5540 class DeviceCommandQueue;
5541 class Kernel;
5542 
5544 class NDRange
5545 {
5546 private:
5547  size_type sizes_[3];
5548  cl_uint dimensions_;
5549 
5550 public:
5552  NDRange()
5553  : dimensions_(0)
5554  {
5555  sizes_[0] = 0;
5556  sizes_[1] = 0;
5557  sizes_[2] = 0;
5558  }
5559 
5561  NDRange(size_type size0)
5562  : dimensions_(1)
5563  {
5564  sizes_[0] = size0;
5565  sizes_[1] = 1;
5566  sizes_[2] = 1;
5567  }
5568 
5570  NDRange(size_type size0, size_type size1)
5571  : dimensions_(2)
5572  {
5573  sizes_[0] = size0;
5574  sizes_[1] = size1;
5575  sizes_[2] = 1;
5576  }
5577 
5579  NDRange(size_type size0, size_type size1, size_type size2)
5580  : dimensions_(3)
5581  {
5582  sizes_[0] = size0;
5583  sizes_[1] = size1;
5584  sizes_[2] = size2;
5585  }
5586 
5591  operator const size_type*() const {
5592  return sizes_;
5593  }
5594 
5596  size_type dimensions() const
5597  {
5598  return dimensions_;
5599  }
5600 
5602  // Returns the size of the object in bytes, based on the runtime number of dimensions
5603  size_type size() const
5604  {
5605  return dimensions_*sizeof(size_type);
5606  }
5607 
5608  size_type* get()
5609  {
5610  return sizes_;
5611  }
5612 
5613  const size_type* get() const
5614  {
5615  return sizes_;
5616  }
5617 };
5618 
5620 static const NDRange NullRange;
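// Illustrative usage sketch (not part of cl2.hpp): NDRange describes a 1-, 2-
// or 3-dimensional index space. Note that size() reports the storage size in
// bytes (dimensions() * sizeof(size_type)), not a work-item count, and that
// NullRange (zero dimensions) lets the runtime choose a local size.
inline void exampleNDRanges()
{
    cl::NDRange global(1024, 768); // 2D global range
    cl::NDRange local(16, 16);     // 2D work-group size
    // global.dimensions() == 2
    // global.size()       == 2 * sizeof(cl::size_type)
    // cl::NullRange.dimensions() == 0
    (void)global;
    (void)local;
}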
5621 
5623 struct LocalSpaceArg
5624 {
5625  size_type size_;
5626 };
5627 
5628 namespace detail {
5629 
5630 template <typename T, class Enable = void>
5631 struct KernelArgumentHandler;
5632 
5633 // Enable for objects that are not subclasses of memory
5634 // Pointers, constants etc
5635 template <typename T>
5636 struct KernelArgumentHandler<T, typename std::enable_if<!std::is_base_of<cl::Memory, T>::value>::type>
5637 {
5638  static size_type size(const T&) { return sizeof(T); }
5639  static const T* ptr(const T& value) { return &value; }
5640 };
5641 
5642 // Enable for subclasses of memory where we want to get a reference to the cl_mem out
5643 // and pass that in for safety
5644 template <typename T>
5645 struct KernelArgumentHandler<T, typename std::enable_if<std::is_base_of<cl::Memory, T>::value>::type>
5646 {
5647  static size_type size(const T&) { return sizeof(cl_mem); }
5648  static const cl_mem* ptr(const T& value) { return &(value()); }
5649 };
5650 
5651 // Specialization for DeviceCommandQueue defined later
5652 
5653 template <>
5654 struct KernelArgumentHandler<LocalSpaceArg, void>
5655 {
5656  static size_type size(const LocalSpaceArg& value) { return value.size_; }
5657  static const void* ptr(const LocalSpaceArg&) { return NULL; }
5658 };
5659 
5660 }
5662 
5666 inline LocalSpaceArg
5667 Local(size_type size)
5668 {
5669  LocalSpaceArg ret = { size };
5670  return ret;
5671 }
5672 
5681 class Kernel : public detail::Wrapper<cl_kernel>
5682 {
5683 public:
5684  inline Kernel(const Program& program, const char* name, cl_int* err = NULL);
5685 
5687  Kernel() { }
5688 
5697  explicit Kernel(const cl_kernel& kernel, bool retainObject = false) :
5698  detail::Wrapper<cl_type>(kernel, retainObject) { }
5699 
5705  Kernel& operator = (const cl_kernel& rhs)
5706  {
5707  detail::Wrapper<cl_type>::operator=(rhs);
5708  return *this;
5709  }
5710 
5714  Kernel(const Kernel& kernel) : detail::Wrapper<cl_type>(kernel) {}
5715 
5719  Kernel& operator = (const Kernel &kernel)
5720  {
5721  detail::Wrapper<cl_type>::operator=(kernel);
5722  return *this;
5723  }
5724 
5728  Kernel(Kernel&& kernel) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(kernel)) {}
5729 
5733  Kernel& operator = (Kernel &&kernel)
5734  {
5735  detail::Wrapper<cl_type>::operator=(std::move(kernel));
5736  return *this;
5737  }
5738 
5739  template <typename T>
5740  cl_int getInfo(cl_kernel_info name, T* param) const
5741  {
5742  return detail::errHandler(
5743  detail::getInfo(&::clGetKernelInfo, object_, name, param),
5744  __GET_KERNEL_INFO_ERR);
5745  }
5746 
5747  template <cl_int name> typename
5748  detail::param_traits<detail::cl_kernel_info, name>::param_type
5749  getInfo(cl_int* err = NULL) const
5750  {
5751  typename detail::param_traits<
5752  detail::cl_kernel_info, name>::param_type param;
5753  cl_int result = getInfo(name, &param);
5754  if (err != NULL) {
5755  *err = result;
5756  }
5757  return param;
5758  }
5759 
5760 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5761  template <typename T>
5762  cl_int getArgInfo(cl_uint argIndex, cl_kernel_arg_info name, T* param) const
5763  {
5764  return detail::errHandler(
5765  detail::getInfo(&::clGetKernelArgInfo, object_, argIndex, name, param),
5766  __GET_KERNEL_ARG_INFO_ERR);
5767  }
5768 
5769  template <cl_int name> typename
5770  detail::param_traits<detail::cl_kernel_arg_info, name>::param_type
5771  getArgInfo(cl_uint argIndex, cl_int* err = NULL) const
5772  {
5773  typename detail::param_traits<
5774  detail::cl_kernel_arg_info, name>::param_type param;
5775  cl_int result = getArgInfo(argIndex, name, &param);
5776  if (err != NULL) {
5777  *err = result;
5778  }
5779  return param;
5780  }
5781 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5782 
5783  template <typename T>
5784  cl_int getWorkGroupInfo(
5785  const Device& device, cl_kernel_work_group_info name, T* param) const
5786  {
5787  return detail::errHandler(
5788  detail::getInfo(
5789  &::clGetKernelWorkGroupInfo, object_, device(), name, param),
5790  __GET_KERNEL_WORK_GROUP_INFO_ERR);
5791  }
5792 
5793  template <cl_int name> typename
5794  detail::param_traits<detail::cl_kernel_work_group_info, name>::param_type
5795  getWorkGroupInfo(const Device& device, cl_int* err = NULL) const
5796  {
5797  typename detail::param_traits<
5798  detail::cl_kernel_work_group_info, name>::param_type param;
5799  cl_int result = getWorkGroupInfo(device, name, &param);
5800  if (err != NULL) {
5801  *err = result;
5802  }
5803  return param;
5804  }
5805 
5806 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5807 #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
5808  cl_int getSubGroupInfo(const cl::Device &dev, cl_kernel_sub_group_info name, const cl::NDRange &range, size_type* param) const
5809  {
5810  typedef clGetKernelSubGroupInfoKHR_fn PFN_clGetKernelSubGroupInfoKHR;
5811  static PFN_clGetKernelSubGroupInfoKHR pfn_clGetKernelSubGroupInfoKHR = NULL;
5812  CL_HPP_INIT_CL_EXT_FCN_PTR_(clGetKernelSubGroupInfoKHR);
5813 
5814  return detail::errHandler(
5815  pfn_clGetKernelSubGroupInfoKHR(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5816  __GET_KERNEL_ARG_INFO_ERR);
5817  }
5818 
5819  template <cl_int name>
5820  size_type getSubGroupInfo(const cl::Device &dev, const cl::NDRange &range, cl_int* err = NULL) const
5821  {
5822  size_type param;
5823  cl_int result = getSubGroupInfo(dev, name, range, &param);
5824  if (err != NULL) {
5825  *err = result;
5826  }
5827  return param;
5828  }
5829 #endif // #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
5830 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5831 
5832 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5833 
5835  template<typename T, class D>
5836  cl_int setArg(cl_uint index, const cl::pointer<T, D> &argPtr)
5837  {
5838  return detail::errHandler(
5839  ::clSetKernelArgSVMPointer(object_, index, argPtr.get()),
5840  __SET_KERNEL_ARGS_ERR);
5841  }
5842 
5845  template<typename T, class Alloc>
5846  cl_int setArg(cl_uint index, const cl::vector<T, Alloc> &argPtr)
5847  {
5848  return detail::errHandler(
5849  ::clSetKernelArgSVMPointer(object_, index, argPtr.data()),
5850  __SET_KERNEL_ARGS_ERR);
5851  }
5852 
5855  template<typename T>
5856  typename std::enable_if<std::is_pointer<T>::value, cl_int>::type
5857  setArg(cl_uint index, const T argPtr)
5858  {
5859  return detail::errHandler(
5860  ::clSetKernelArgSVMPointer(object_, index, argPtr),
5861  __SET_KERNEL_ARGS_ERR);
5862  }
5863 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5864 
5867  template <typename T>
5868  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
5869  setArg(cl_uint index, const T &value)
5870  {
5871  return detail::errHandler(
5872  ::clSetKernelArg(
5873  object_,
5874  index,
5875  detail::KernelArgumentHandler<T>::size(value),
5876  detail::KernelArgumentHandler<T>::ptr(value)),
5877  __SET_KERNEL_ARGS_ERR);
5878  }
5879 
5880  cl_int setArg(cl_uint index, size_type size, const void* argPtr)
5881  {
5882  return detail::errHandler(
5883  ::clSetKernelArg(object_, index, size, argPtr),
5884  __SET_KERNEL_ARGS_ERR);
5885  }
5886 
5887 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5888 
5892  cl_int setSVMPointers(const vector<void*> &pointerList)
5893  {
5894  return detail::errHandler(
5895  ::clSetKernelExecInfo(
5896  object_,
5897  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5898  sizeof(void*)*pointerList.size(),
5899  pointerList.data()));
5900  }
5901 
5906  template<int ArrayLength>
5907  cl_int setSVMPointers(const std::array<void*, ArrayLength> &pointerList)
5908  {
5909  return detail::errHandler(
5910  ::clSetKernelExecInfo(
5911  object_,
5912  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5913  sizeof(void*)*pointerList.size(),
5914  pointerList.data()));
5915  }
5916 
5928  cl_int enableFineGrainedSystemSVM(bool svmEnabled)
5929  {
5930  cl_bool svmEnabled_ = svmEnabled ? CL_TRUE : CL_FALSE;
5931  return detail::errHandler(
5932  ::clSetKernelExecInfo(
5933  object_,
5934  CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM,
5935  sizeof(cl_bool),
5936  &svmEnabled_
5937  )
5938  );
5939  }
5940 
5941  template<int index, int ArrayLength, class D, typename T0, typename T1, typename... Ts>
5942  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0, const pointer<T1, D> &t1, Ts & ... ts)
5943  {
5944  pointerList[index] = static_cast<void*>(t0.get());
5945  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
5946  }
5947 
5948  template<int index, int ArrayLength, typename T0, typename T1, typename... Ts>
5949  typename std::enable_if<std::is_pointer<T0>::value, void>::type
5950  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0, T1 t1, Ts... ts)
5951  {
5952  pointerList[index] = static_cast<void*>(t0);
5953  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
5954  }
5955 
5956  template<int index, int ArrayLength, typename T0, class D>
5957  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0)
5958  {
5959  pointerList[index] = static_cast<void*>(t0.get());
5960  }
5961 
5962 
5963  template<int index, int ArrayLength, typename T0>
5964  typename std::enable_if<std::is_pointer<T0>::value, void>::type
5965  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0)
5966  {
5967  pointerList[index] = static_cast<void*>(t0);
5968  }
5969 
5970  template<typename T0, typename... Ts>
5971  cl_int setSVMPointers(const T0 &t0, Ts & ... ts)
5972  {
5973  std::array<void*, 1 + sizeof...(Ts)> pointerList;
5974 
5975  setSVMPointersHelper<0, 1 + sizeof...(Ts)>(pointerList, t0, ts...);
5976  return detail::errHandler(
5977  ::clSetKernelExecInfo(
5978  object_,
5979  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5980  sizeof(void*)*(1 + sizeof...(Ts)),
5981  pointerList.data()));
5982  }
5983 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5984 };
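// Illustrative usage sketch (not part of cl2.hpp): creating a kernel, binding
// buffer, plain-value and __local arguments, and querying the work-group
// limit for a device. The kernel name "vector_add" and its argument layout
// are hypothetical; 'program' is assumed to be built for 'device'.
inline cl::Kernel exampleConfigureKernel(const cl::Program &program,
                                         const cl::Device &device,
                                         const cl::Buffer &a,
                                         const cl::Buffer &b,
                                         const cl::Buffer &result,
                                         cl_uint elementCount,
                                         cl_int *err = NULL)
{
    cl::Kernel kernel(program, "vector_add", err);
    kernel.setArg(0, a);            // Memory subclasses are passed as cl_mem
    kernel.setArg(1, b);
    kernel.setArg(2, result);
    kernel.setArg(3, elementCount); // plain values are passed by size/pointer
    kernel.setArg(4, cl::Local(256 * sizeof(cl_float))); // __local scratch
    cl::size_type maxWorkGroup =
        kernel.getWorkGroupInfo<CL_KERNEL_WORK_GROUP_SIZE>(device, err);
    (void)maxWorkGroup;
    return kernel;
}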
5985 
5989 class Program : public detail::Wrapper<cl_program>
5990 {
5991 public:
5992 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5993  typedef vector<vector<unsigned char>> Binaries;
5994  typedef vector<string> Sources;
5995 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5996  typedef vector<std::pair<const void*, size_type> > Binaries;
5997  typedef vector<std::pair<const char*, size_type> > Sources;
5998 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5999 
6000  Program(
6001  const string& source,
6002  bool build = false,
6003  cl_int* err = NULL)
6004  {
6005  cl_int error;
6006 
6007  const char * strings = source.c_str();
6008  const size_type length = source.size();
6009 
6010  Context context = Context::getDefault(err);
6011 
6012  object_ = ::clCreateProgramWithSource(
6013  context(), (cl_uint)1, &strings, &length, &error);
6014 
6015  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6016 
6017  if (error == CL_SUCCESS && build) {
6018 
6019  error = ::clBuildProgram(
6020  object_,
6021  0,
6022  NULL,
6023 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6024  "-cl-std=CL2.0",
6025 #else
6026  "",
6027 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6028  NULL,
6029  NULL);
6030 
6031  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6032  }
6033 
6034  if (err != NULL) {
6035  *err = error;
6036  }
6037  }
6038 
6039  Program(
6040  const Context& context,
6041  const string& source,
6042  bool build = false,
6043  cl_int* err = NULL)
6044  {
6045  cl_int error;
6046 
6047  const char * strings = source.c_str();
6048  const size_type length = source.size();
6049 
6050  object_ = ::clCreateProgramWithSource(
6051  context(), (cl_uint)1, &strings, &length, &error);
6052 
6053  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6054 
6055  if (error == CL_SUCCESS && build) {
6056  error = ::clBuildProgram(
6057  object_,
6058  0,
6059  NULL,
6060 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6061  "-cl-std=CL2.0",
6062 #else
6063  "",
6064 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6065  NULL,
6066  NULL);
6067 
6068  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6069  }
6070 
6071  if (err != NULL) {
6072  *err = error;
6073  }
6074  }
6075 
6080  Program(
6081  const Sources& sources,
6082  cl_int* err = NULL)
6083  {
6084  cl_int error;
6085  Context context = Context::getDefault(err);
6086 
6087  const size_type n = (size_type)sources.size();
6088 
6089  vector<size_type> lengths(n);
6090  vector<const char*> strings(n);
6091 
6092  for (size_type i = 0; i < n; ++i) {
6093 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6094  strings[i] = sources[(int)i].data();
6095  lengths[i] = sources[(int)i].length();
6096 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6097  strings[i] = sources[(int)i].first;
6098  lengths[i] = sources[(int)i].second;
6099 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6100  }
6101 
6102  object_ = ::clCreateProgramWithSource(
6103  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6104 
6105  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6106  if (err != NULL) {
6107  *err = error;
6108  }
6109  }
6110 
6115  Program(
6116  const Context& context,
6117  const Sources& sources,
6118  cl_int* err = NULL)
6119  {
6120  cl_int error;
6121 
6122  const size_type n = (size_type)sources.size();
6123 
6124  vector<size_type> lengths(n);
6125  vector<const char*> strings(n);
6126 
6127  for (size_type i = 0; i < n; ++i) {
6128 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6129  strings[i] = sources[(int)i].data();
6130  lengths[i] = sources[(int)i].length();
6131 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6132  strings[i] = sources[(int)i].first;
6133  lengths[i] = sources[(int)i].second;
6134 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6135  }
6136 
6137  object_ = ::clCreateProgramWithSource(
6138  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6139 
6140  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6141  if (err != NULL) {
6142  *err = error;
6143  }
6144  }
6145 
6165  Program(
6166  const Context& context,
6167  const vector<Device>& devices,
6168  const Binaries& binaries,
6169  vector<cl_int>* binaryStatus = NULL,
6170  cl_int* err = NULL)
6171  {
6172  cl_int error;
6173 
6174  const size_type numDevices = devices.size();
6175 
6176  // Catch size mismatch early and return
6177  if(binaries.size() != numDevices) {
6178  error = CL_INVALID_VALUE;
6179  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6180  if (err != NULL) {
6181  *err = error;
6182  }
6183  return;
6184  }
6185 
6186 
6187  vector<size_type> lengths(numDevices);
6188  vector<const unsigned char*> images(numDevices);
6189 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6190  for (size_type i = 0; i < numDevices; ++i) {
6191  images[i] = binaries[i].data();
6192  lengths[i] = binaries[(int)i].size();
6193  }
6194 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6195  for (size_type i = 0; i < numDevices; ++i) {
6196  images[i] = (const unsigned char*)binaries[i].first;
6197  lengths[i] = binaries[(int)i].second;
6198  }
6199 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6200 
6201  vector<cl_device_id> deviceIDs(numDevices);
6202  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6203  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6204  }
6205 
6206  if(binaryStatus) {
6207  binaryStatus->resize(numDevices);
6208  }
6209 
6210  object_ = ::clCreateProgramWithBinary(
6211  context(), (cl_uint) devices.size(),
6212  deviceIDs.data(),
6213  lengths.data(), images.data(), (binaryStatus != NULL && numDevices > 0)
6214  ? &binaryStatus->front()
6215  : NULL, &error);
6216 
6217  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6218  if (err != NULL) {
6219  *err = error;
6220  }
6221  }
6222 
6223 
6224 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6225 
6229  Program(
6230  const Context& context,
6231  const vector<Device>& devices,
6232  const string& kernelNames,
6233  cl_int* err = NULL)
6234  {
6235  cl_int error;
6236 
6237 
6238  size_type numDevices = devices.size();
6239  vector<cl_device_id> deviceIDs(numDevices);
6240  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6241  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6242  }
6243 
6244  object_ = ::clCreateProgramWithBuiltInKernels(
6245  context(),
6246  (cl_uint) devices.size(),
6247  deviceIDs.data(),
6248  kernelNames.c_str(),
6249  &error);
6250 
6251  detail::errHandler(error, __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR);
6252  if (err != NULL) {
6253  *err = error;
6254  }
6255  }
6256 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6257 
6258  Program() { }
6259 
6260 
6267  explicit Program(const cl_program& program, bool retainObject = false) :
6268  detail::Wrapper<cl_type>(program, retainObject) { }
6269 
6270  Program& operator = (const cl_program& rhs)
6271  {
6272  detail::Wrapper<cl_type>::operator=(rhs);
6273  return *this;
6274  }
6275 
6279  Program(const Program& program) : detail::Wrapper<cl_type>(program) {}
6280 
6284  Program& operator = (const Program &program)
6285  {
6286  detail::Wrapper<cl_type>::operator=(program);
6287  return *this;
6288  }
6289 
6293  Program(Program&& program) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(program)) {}
6294 
6298  Program& operator = (Program &&program)
6299  {
6300  detail::Wrapper<cl_type>::operator=(std::move(program));
6301  return *this;
6302  }
6303 
6304  cl_int build(
6305  const vector<Device>& devices,
6306  const char* options = NULL,
6307  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6308  void* data = NULL) const
6309  {
6310  size_type numDevices = devices.size();
6311  vector<cl_device_id> deviceIDs(numDevices);
6312 
6313  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6314  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6315  }
6316 
6317  cl_int buildError = ::clBuildProgram(
6318  object_,
6319  (cl_uint)
6320  devices.size(),
6321  deviceIDs.data(),
6322  options,
6323  notifyFptr,
6324  data);
6325 
6326  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6327  }
6328 
6329  cl_int build(
6330  const char* options = NULL,
6331  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6332  void* data = NULL) const
6333  {
6334  cl_int buildError = ::clBuildProgram(
6335  object_,
6336  0,
6337  NULL,
6338  options,
6339  notifyFptr,
6340  data);
6341 
6342 
6343  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6344  }
6345 
6346 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6347  cl_int compile(
6348  const char* options = NULL,
6349  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6350  void* data = NULL) const
6351  {
6352  cl_int error = ::clCompileProgram(
6353  object_,
6354  0,
6355  NULL,
6356  options,
6357  0,
6358  NULL,
6359  NULL,
6360  notifyFptr,
6361  data);
6362  return detail::buildErrHandler(error, __COMPILE_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6363  }
6364 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6365 
6366  template <typename T>
6367  cl_int getInfo(cl_program_info name, T* param) const
6368  {
6369  return detail::errHandler(
6370  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6371  __GET_PROGRAM_INFO_ERR);
6372  }
6373 
6374  template <cl_int name> typename
6375  detail::param_traits<detail::cl_program_info, name>::param_type
6376  getInfo(cl_int* err = NULL) const
6377  {
6378  typename detail::param_traits<
6379  detail::cl_program_info, name>::param_type param;
6380  cl_int result = getInfo(name, &param);
6381  if (err != NULL) {
6382  *err = result;
6383  }
6384  return param;
6385  }
6386 
6387  template <typename T>
6388  cl_int getBuildInfo(
6389  const Device& device, cl_program_build_info name, T* param) const
6390  {
6391  return detail::errHandler(
6392  detail::getInfo(
6393  &::clGetProgramBuildInfo, object_, device(), name, param),
6394  __GET_PROGRAM_BUILD_INFO_ERR);
6395  }
6396 
6397  template <cl_int name> typename
6398  detail::param_traits<detail::cl_program_build_info, name>::param_type
6399  getBuildInfo(const Device& device, cl_int* err = NULL) const
6400  {
6401  typename detail::param_traits<
6402  detail::cl_program_build_info, name>::param_type param;
6403  cl_int result = getBuildInfo(device, name, &param);
6404  if (err != NULL) {
6405  *err = result;
6406  }
6407  return param;
6408  }
6409 
6415  template <cl_int name>
6416  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6417  getBuildInfo(cl_int *err = NULL) const
6418  {
6419  cl_int result = CL_SUCCESS;
6420 
6421  auto devs = getInfo<CL_PROGRAM_DEVICES>(&result);
6422  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6423  devInfo;
6424 
6425  // If there was an initial error from getInfo return the error
6426  if (result != CL_SUCCESS) {
6427  if (err != NULL) {
6428  *err = result;
6429  }
6430  return devInfo;
6431  }
6432 
6433  for (const cl::Device &d : devs) {
6434  typename detail::param_traits<
6435  detail::cl_program_build_info, name>::param_type param;
6436  result = getBuildInfo(d, name, &param);
6437  devInfo.push_back(
6438  std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>
6439  (d, param));
6440  if (result != CL_SUCCESS) {
6441  // On error, leave the loop and return the error code
6442  break;
6443  }
6444  }
6445  if (err != NULL) {
6446  *err = result;
6447  }
6448  if (result != CL_SUCCESS) {
6449  devInfo.clear();
6450  }
6451  return devInfo;
6452  }
6453 
6454  cl_int createKernels(vector<Kernel>* kernels)
6455  {
6456  cl_uint numKernels;
6457  cl_int err = ::clCreateKernelsInProgram(object_, 0, NULL, &numKernels);
6458  if (err != CL_SUCCESS) {
6459  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6460  }
6461 
6462  vector<cl_kernel> value(numKernels);
6463 
6464  err = ::clCreateKernelsInProgram(
6465  object_, numKernels, value.data(), NULL);
6466  if (err != CL_SUCCESS) {
6467  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6468  }
6469 
6470  if (kernels) {
6471  kernels->resize(value.size());
6472 
6473  // Wrap each returned cl_kernel in the output vector; ownership
6474  // transfers from the runtime, so no extra retain is needed
6475  for (size_type i = 0; i < value.size(); i++) {
6476  // We do not need to retain because this kernel is being created
6477  // by the runtime
6478  (*kernels)[i] = Kernel(value[i], false);
6479  }
6480  }
6481  return CL_SUCCESS;
6482  }
6483 };
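// Illustrative usage sketch (not part of cl2.hpp): building a program from an
// inline source string and retrieving the build log on failure. The kernel
// source is a trivial example; assumes exceptions are not enabled so that
// build() reports failure through its return value.
inline cl::Program exampleBuildProgram(const cl::Context &context,
                                       const cl::Device &device,
                                       cl_int *err = NULL)
{
    static const char exampleSource[] =
        "kernel void vector_add(global const float* a,\n"
        "                       global const float* b,\n"
        "                       global float* c)\n"
        "{ size_t i = get_global_id(0); c[i] = a[i] + b[i]; }\n";

    cl::Program program(context, exampleSource, false, err);
    cl_int buildErr = program.build(); // build options could be passed here
    if (buildErr != CL_SUCCESS) {
        cl::string log = program.getBuildInfo<CL_PROGRAM_BUILD_LOG>(device);
        (void)log; // inspect or print the compiler output
    }
    if (err != NULL) {
        *err = buildErr;
    }
    return program;
}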
6484 
6485 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6486 inline Program linkProgram(
6487  Program input1,
6488  Program input2,
6489  const char* options = NULL,
6490  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6491  void* data = NULL,
6492  cl_int* err = NULL)
6493 {
6494  cl_int error_local = CL_SUCCESS;
6495 
6496  cl_program programs[2] = { input1(), input2() };
6497 
6498  Context ctx = input1.getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6499  if(error_local!=CL_SUCCESS) {
6500  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6501  }
6502 
6503  cl_program prog = ::clLinkProgram(
6504  ctx(),
6505  0,
6506  NULL,
6507  options,
6508  2,
6509  programs,
6510  notifyFptr,
6511  data,
6512  &error_local);
6513 
6514  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6515  if (err != NULL) {
6516  *err = error_local;
6517  }
6518 
6519  return Program(prog);
6520 }
6521 
6522 inline Program linkProgram(
6523  vector<Program> inputPrograms,
6524  const char* options = NULL,
6525  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6526  void* data = NULL,
6527  cl_int* err = NULL)
6528 {
6529  cl_int error_local = CL_SUCCESS;
6530 
6531  vector<cl_program> programs(inputPrograms.size());
6532 
6533  for (unsigned int i = 0; i < inputPrograms.size(); i++) {
6534  programs[i] = inputPrograms[i]();
6535  }
6536 
6537  Context ctx;
6538  if(inputPrograms.size() > 0) {
6539  ctx = inputPrograms[0].getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6540  if(error_local!=CL_SUCCESS) {
6541  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6542  }
6543  }
6544  cl_program prog = ::clLinkProgram(
6545  ctx(),
6546  0,
6547  NULL,
6548  options,
6549  (cl_uint)inputPrograms.size(),
6550  programs.data(),
6551  notifyFptr,
6552  data,
6553  &error_local);
6554 
6555  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6556  if (err != NULL) {
6557  *err = error_local;
6558  }
6559 
6560  return Program(prog, false);
6561 }
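// Illustrative usage sketch (not part of cl2.hpp): separate compilation and
// linking with compile() and linkProgram(), mirroring clCompileProgram() and
// clLinkProgram(). Both programs are assumed to share the same context and to
// have been created from source but not yet built.
inline cl::Program exampleCompileAndLink(cl::Program &library,
                                         cl::Program &main_module,
                                         cl_int *err = NULL)
{
    library.compile();
    main_module.compile();
    return cl::linkProgram(main_module, library,
                           NULL,  // link options
                           NULL,  // notification callback
                           NULL,  // callback user data
                           err);
}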
6562 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6563 
6564 // Template specialization for CL_PROGRAM_BINARIES
6565 template <>
6566 inline cl_int cl::Program::getInfo(cl_program_info name, vector<vector<unsigned char>>* param) const
6567 {
6568  if (name != CL_PROGRAM_BINARIES) {
6569  return CL_INVALID_VALUE;
6570  }
6571  if (param) {
6572  // Resize the parameter array appropriately for each allocation
6573  // and pass down to the helper
6574 
6575  vector<size_type> sizes = getInfo<CL_PROGRAM_BINARY_SIZES>();
6576  size_type numBinaries = sizes.size();
6577 
6578  // Resize the parameter array and constituent arrays
6579  param->resize(numBinaries);
6580  for (size_type i = 0; i < numBinaries; ++i) {
6581  (*param)[i].resize(sizes[i]);
6582  }
6583 
6584  return detail::errHandler(
6585  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6586  __GET_PROGRAM_INFO_ERR);
6587  }
6588 
6589  return CL_SUCCESS;
6590 }
6591 
6592 template<>
6593 inline vector<vector<unsigned char>> cl::Program::getInfo<CL_PROGRAM_BINARIES>(cl_int* err) const
6594 {
6595  vector<vector<unsigned char>> binariesVectors;
6596 
6597  cl_int result = getInfo(CL_PROGRAM_BINARIES, &binariesVectors);
6598  if (err != NULL) {
6599  *err = result;
6600  }
6601  return binariesVectors;
6602 }
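// Illustrative usage sketch (not part of cl2.hpp): fetching the device
// binaries of a built program through the specialisation above; each inner
// vector is sized from CL_PROGRAM_BINARY_SIZES before the query is made.
inline cl::size_type exampleFirstBinarySize(const cl::Program &program,
                                            cl_int *err = NULL)
{
    cl::vector<cl::vector<unsigned char>> binaries =
        program.getInfo<CL_PROGRAM_BINARIES>(err);
    return binaries.empty() ? 0 : binaries[0].size();
}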
6603 
6604 inline Kernel::Kernel(const Program& program, const char* name, cl_int* err)
6605 {
6606  cl_int error;
6607 
6608  object_ = ::clCreateKernel(program(), name, &error);
6609  detail::errHandler(error, __CREATE_KERNEL_ERR);
6610 
6611  if (err != NULL) {
6612  *err = error;
6613  }
6614 
6615 }
6616 
6617 enum class QueueProperties : cl_command_queue_properties
6618 {
6619  None = 0,
6620  Profiling = CL_QUEUE_PROFILING_ENABLE,
6621  OutOfOrder = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE,
6622 };
6623 
6624 inline QueueProperties operator|(QueueProperties lhs, QueueProperties rhs)
6625 {
6626  return static_cast<QueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
6627 }
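// Illustrative usage sketch (not part of cl2.hpp): QueueProperties values are
// scoped flags, so they are combined with the operator| defined above and can
// then be handed to the CommandQueue constructors that follow.
inline cl::QueueProperties exampleQueueProperties()
{
    // Request an out-of-order queue with profiling enabled.
    return cl::QueueProperties::Profiling | cl::QueueProperties::OutOfOrder;
}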
6628 
6632 class CommandQueue : public detail::Wrapper<cl_command_queue>
6633 {
6634 private:
6635  static std::once_flag default_initialized_;
6636  static CommandQueue default_;
6637  static cl_int default_error_;
6638 
6644  static void makeDefault()
6645  {
6646  /* We don't want to throw an error from this function, so we have to
6647  * catch and set the error flag.
6648  */
6649 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
6650  try
6651 #endif
6652  {
6653  int error;
6654  Context context = Context::getDefault(&error);
6655 
6656  if (error != CL_SUCCESS) {
6657  default_error_ = error;
6658  }
6659  else {
6660  Device device = Device::getDefault();
6661  default_ = CommandQueue(context, device, 0, &default_error_);
6662  }
6663  }
6664 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
6665  catch (cl::Error &e) {
6666  default_error_ = e.err();
6667  }
6668 #endif
6669  }
6670 
6676  static void makeDefaultProvided(const CommandQueue &c) {
6677  default_ = c;
6678  }
6679 
6680 public:
6681 #ifdef CL_HPP_UNIT_TEST_ENABLE
6682 
6688  static void unitTestClearDefault() {
6689  default_ = CommandQueue();
6690  }
6691 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
6692 
6693 
6698  CommandQueue(
6699  cl_command_queue_properties properties,
6700  cl_int* err = NULL)
6701  {
6702  cl_int error;
6703 
6704  Context context = Context::getDefault(&error);
6705  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6706 
6707  if (error != CL_SUCCESS) {
6708  if (err != NULL) {
6709  *err = error;
6710  }
6711  }
6712  else {
6713  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
6714  bool useWithProperties;
6715 
6716 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6717  // Run-time decision based on the actual platform
6718  {
6719  cl_uint version = detail::getContextPlatformVersion(context());
6720  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6721  }
6722 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6723  useWithProperties = true;
6724 #else
6725  useWithProperties = false;
6726 #endif
6727 
6728 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6729  if (useWithProperties) {
6730  cl_queue_properties queue_properties[] = {
6731  CL_QUEUE_PROPERTIES, properties, 0 };
6732  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
6733  object_ = ::clCreateCommandQueueWithProperties(
6734  context(), device(), queue_properties, &error);
6735  }
6736  else {
6737  error = CL_INVALID_QUEUE_PROPERTIES;
6738  }
6739 
6740  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6741  if (err != NULL) {
6742  *err = error;
6743  }
6744  }
6745 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6746 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6747  if (!useWithProperties) {
6748  object_ = ::clCreateCommandQueue(
6749  context(), device(), properties, &error);
6750 
6751  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6752  if (err != NULL) {
6753  *err = error;
6754  }
6755  }
6756 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6757  }
6758  }
6759 
6764  CommandQueue(
6765  QueueProperties properties,
6766  cl_int* err = NULL)
6767  {
6768  cl_int error;
6769 
6770  Context context = Context::getDefault(&error);
6771  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6772 
6773  if (error != CL_SUCCESS) {
6774  if (err != NULL) {
6775  *err = error;
6776  }
6777  }
6778  else {
6779  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
6780  bool useWithProperties;
6781 
6782 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6783  // Run-time decision based on the actual platform
6784  {
6785  cl_uint version = detail::getContextPlatformVersion(context());
6786  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6787  }
6788 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6789  useWithProperties = true;
6790 #else
6791  useWithProperties = false;
6792 #endif
6793 
6794 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6795  if (useWithProperties) {
6796  cl_queue_properties queue_properties[] = {
6797  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
6798 
6799  object_ = ::clCreateCommandQueueWithProperties(
6800  context(), device(), queue_properties, &error);
6801 
6802  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6803  if (err != NULL) {
6804  *err = error;
6805  }
6806  }
6807 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6808 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6809  if (!useWithProperties) {
6810  object_ = ::clCreateCommandQueue(
6811  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
6812 
6813  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6814  if (err != NULL) {
6815  *err = error;
6816  }
6817  }
6818 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6819 
6820  }
6821  }
6822 
6827  explicit CommandQueue(
6828  const Context& context,
6829  cl_command_queue_properties properties = 0,
6830  cl_int* err = NULL)
6831  {
6832  cl_int error;
6833  bool useWithProperties;
6834  vector<cl::Device> devices;
6835  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
6836 
6837  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6838 
6839  if (error != CL_SUCCESS)
6840  {
6841  if (err != NULL) {
6842  *err = error;
6843  }
6844  return;
6845  }
6846 
6847 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6848  // Run-time decision based on the actual platform
6849  {
6850  cl_uint version = detail::getContextPlatformVersion(context());
6851  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6852  }
6853 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6854  useWithProperties = true;
6855 #else
6856  useWithProperties = false;
6857 #endif
6858 
6859 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6860  if (useWithProperties) {
6861  cl_queue_properties queue_properties[] = {
6862  CL_QUEUE_PROPERTIES, properties, 0 };
6863  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
6864  object_ = ::clCreateCommandQueueWithProperties(
6865  context(), devices[0](), queue_properties, &error);
6866  }
6867  else {
6868  error = CL_INVALID_QUEUE_PROPERTIES;
6869  }
6870 
6871  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6872  if (err != NULL) {
6873  *err = error;
6874  }
6875  }
6876 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6877 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6878  if (!useWithProperties) {
6879  object_ = ::clCreateCommandQueue(
6880  context(), devices[0](), properties, &error);
6881 
6882  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6883  if (err != NULL) {
6884  *err = error;
6885  }
6886  }
6887 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6888  }
6889 
6894  explicit CommandQueue(
6895  const Context& context,
6896  QueueProperties properties,
6897  cl_int* err = NULL)
6898  {
6899  cl_int error;
6900  bool useWithProperties;
6901  vector<cl::Device> devices;
6902  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
6903 
6904  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6905 
6906  if (error != CL_SUCCESS)
6907  {
6908  if (err != NULL) {
6909  *err = error;
6910  }
6911  return;
6912  }
6913 
6914 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6915  // Run-time decision based on the actual platform
6916  {
6917  cl_uint version = detail::getContextPlatformVersion(context());
6918  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6919  }
6920 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6921  useWithProperties = true;
6922 #else
6923  useWithProperties = false;
6924 #endif
6925 
6926 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6927  if (useWithProperties) {
6928  cl_queue_properties queue_properties[] = {
6929  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
6930  object_ = ::clCreateCommandQueueWithProperties(
6931  context(), devices[0](), queue_properties, &error);
6932 
6933  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6934  if (err != NULL) {
6935  *err = error;
6936  }
6937  }
6938 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6939 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6940  if (!useWithProperties) {
6941  object_ = ::clCreateCommandQueue(
6942  context(), devices[0](), static_cast<cl_command_queue_properties>(properties), &error);
6943 
6944  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6945  if (err != NULL) {
6946  *err = error;
6947  }
6948  }
6949 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6950  }
6951 
6956  CommandQueue(
6957  const Context& context,
6958  const Device& device,
6959  cl_command_queue_properties properties = 0,
6960  cl_int* err = NULL)
6961  {
6962  cl_int error;
6963  bool useWithProperties;
6964 
6965 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6966  // Run-time decision based on the actual platform
6967  {
6968  cl_uint version = detail::getContextPlatformVersion(context());
6969  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6970  }
6971 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6972  useWithProperties = true;
6973 #else
6974  useWithProperties = false;
6975 #endif
6976 
6977 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6978  if (useWithProperties) {
6979  cl_queue_properties queue_properties[] = {
6980  CL_QUEUE_PROPERTIES, properties, 0 };
6981  object_ = ::clCreateCommandQueueWithProperties(
6982  context(), device(), queue_properties, &error);
6983 
6984  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6985  if (err != NULL) {
6986  *err = error;
6987  }
6988  }
6989 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6990 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6991  if (!useWithProperties) {
6992  object_ = ::clCreateCommandQueue(
6993  context(), device(), properties, &error);
6994 
6995  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6996  if (err != NULL) {
6997  *err = error;
6998  }
6999  }
7000 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7001  }
7002 
7007  CommandQueue(
7008  const Context& context,
7009  const Device& device,
7010  QueueProperties properties,
7011  cl_int* err = NULL)
7012  {
7013  cl_int error;
7014  bool useWithProperties;
7015 
7016 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7017  // Run-time decision based on the actual platform
7018  {
7019  cl_uint version = detail::getContextPlatformVersion(context());
7020  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7021  }
7022 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7023  useWithProperties = true;
7024 #else
7025  useWithProperties = false;
7026 #endif
7027 
7028 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7029  if (useWithProperties) {
7030  cl_queue_properties queue_properties[] = {
7031  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7032  object_ = ::clCreateCommandQueueWithProperties(
7033  context(), device(), queue_properties, &error);
7034 
7035  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7036  if (err != NULL) {
7037  *err = error;
7038  }
7039  }
7040 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7041 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7042  if (!useWithProperties) {
7043  object_ = ::clCreateCommandQueue(
7044  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7045 
7046  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7047  if (err != NULL) {
7048  *err = error;
7049  }
7050  }
7051 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7052  }
7053 
7054  static CommandQueue getDefault(cl_int * err = NULL)
7055  {
7056  std::call_once(default_initialized_, makeDefault);
7057 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7058  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7059 #else // CL_HPP_TARGET_OPENCL_VERSION >= 200
7060  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_ERR);
7061 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7062  if (err != NULL) {
7063  *err = default_error_;
7064  }
7065  return default_;
7066  }
7067 
7075  static CommandQueue setDefault(const CommandQueue &default_queue)
7076  {
7077  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_queue));
7078  detail::errHandler(default_error_);
7079  return default_;
7080  }
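 /* Illustrative usage sketch (not part of cl2.hpp): getDefault() lazily
  * creates a queue on the default context and device the first time it is
  * called, so setDefault() only takes effect if it runs before that first
  * call. For example:
  *
  *     cl::CommandQueue queue(context, device);
  *     cl::CommandQueue::setDefault(queue);               // adopt as default
  *     cl::CommandQueue same = cl::CommandQueue::getDefault();
  */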
7081 
7082  CommandQueue() { }
7083 
7084 
7091  explicit CommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
7092  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
7093 
7094  CommandQueue& operator = (const cl_command_queue& rhs)
7095  {
7096  detail::Wrapper<cl_type>::operator=(rhs);
7097  return *this;
7098  }
7099 
7103  CommandQueue(const CommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
7104 
7108  CommandQueue& operator = (const CommandQueue &queue)
7109  {
7110  detail::Wrapper<cl_type>::operator=(queue);
7111  return *this;
7112  }
7113 
7117  CommandQueue(CommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
7118 
7122  CommandQueue& operator = (CommandQueue &&queue)
7123  {
7124  detail::Wrapper<cl_type>::operator=(std::move(queue));
7125  return *this;
7126  }
7127 
7128  template <typename T>
7129  cl_int getInfo(cl_command_queue_info name, T* param) const
7130  {
7131  return detail::errHandler(
7132  detail::getInfo(
7133  &::clGetCommandQueueInfo, object_, name, param),
7134  __GET_COMMAND_QUEUE_INFO_ERR);
7135  }
7136 
7137  template <cl_int name> typename
7138  detail::param_traits<detail::cl_command_queue_info, name>::param_type
7139  getInfo(cl_int* err = NULL) const
7140  {
7141  typename detail::param_traits<
7142  detail::cl_command_queue_info, name>::param_type param;
7143  cl_int result = getInfo(name, &param);
7144  if (err != NULL) {
7145  *err = result;
7146  }
7147  return param;
7148  }
7149 
7150  cl_int enqueueReadBuffer(
7151  const Buffer& buffer,
7152  cl_bool blocking,
7153  size_type offset,
7154  size_type size,
7155  void* ptr,
7156  const vector<Event>* events = NULL,
7157  Event* event = NULL) const
7158  {
7159  cl_event tmp;
7160  cl_int err = detail::errHandler(
7161  ::clEnqueueReadBuffer(
7162  object_, buffer(), blocking, offset, size,
7163  ptr,
7164  (events != NULL) ? (cl_uint) events->size() : 0,
7165  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7166  (event != NULL) ? &tmp : NULL),
7167  __ENQUEUE_READ_BUFFER_ERR);
7168 
7169  if (event != NULL && err == CL_SUCCESS)
7170  *event = tmp;
7171 
7172  return err;
7173  }
7174 
7175  cl_int enqueueWriteBuffer(
7176  const Buffer& buffer,
7177  cl_bool blocking,
7178  size_type offset,
7179  size_type size,
7180  const void* ptr,
7181  const vector<Event>* events = NULL,
7182  Event* event = NULL) const
7183  {
7184  cl_event tmp;
7185  cl_int err = detail::errHandler(
7186  ::clEnqueueWriteBuffer(
7187  object_, buffer(), blocking, offset, size,
7188  ptr,
7189  (events != NULL) ? (cl_uint) events->size() : 0,
7190  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7191  (event != NULL) ? &tmp : NULL),
7192  __ENQUEUE_WRITE_BUFFER_ERR);
7193 
7194  if (event != NULL && err == CL_SUCCESS)
7195  *event = tmp;
7196 
7197  return err;
7198  }
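 /* Illustrative usage sketch (not part of cl2.hpp): a blocking round trip
  * through a buffer using the two calls above, with CL_TRUE making each call
  * wait for completion. 'queue' and 'buffer' are assumed valid and 'host'
  * to point at least 'bytes' bytes of storage.
  *
  *     queue.enqueueWriteBuffer(buffer, CL_TRUE, 0, bytes, host);
  *     // ... enqueue kernels that consume 'buffer' ...
  *     queue.enqueueReadBuffer(buffer, CL_TRUE, 0, bytes, host);
  */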
7199 
7200  cl_int enqueueCopyBuffer(
7201  const Buffer& src,
7202  const Buffer& dst,
7203  size_type src_offset,
7204  size_type dst_offset,
7205  size_type size,
7206  const vector<Event>* events = NULL,
7207  Event* event = NULL) const
7208  {
7209  cl_event tmp;
7210  cl_int err = detail::errHandler(
7211  ::clEnqueueCopyBuffer(
7212  object_, src(), dst(), src_offset, dst_offset, size,
7213  (events != NULL) ? (cl_uint) events->size() : 0,
7214  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7215  (event != NULL) ? &tmp : NULL),
7216  __ENQEUE_COPY_BUFFER_ERR);
7217 
7218  if (event != NULL && err == CL_SUCCESS)
7219  *event = tmp;
7220 
7221  return err;
7222  }
7223 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
7224  cl_int enqueueReadBufferRect(
7225  const Buffer& buffer,
7226  cl_bool blocking,
7227  const array<size_type, 3>& buffer_offset,
7228  const array<size_type, 3>& host_offset,
7229  const array<size_type, 3>& region,
7230  size_type buffer_row_pitch,
7231  size_type buffer_slice_pitch,
7232  size_type host_row_pitch,
7233  size_type host_slice_pitch,
7234  void *ptr,
7235  const vector<Event>* events = NULL,
7236  Event* event = NULL) const
7237  {
7238  cl_event tmp;
7239  cl_int err = detail::errHandler(
7240  ::clEnqueueReadBufferRect(
7241  object_,
7242  buffer(),
7243  blocking,
7244  buffer_offset.data(),
7245  host_offset.data(),
7246  region.data(),
7247  buffer_row_pitch,
7248  buffer_slice_pitch,
7249  host_row_pitch,
7250  host_slice_pitch,
7251  ptr,
7252  (events != NULL) ? (cl_uint) events->size() : 0,
7253  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7254  (event != NULL) ? &tmp : NULL),
7255  __ENQUEUE_READ_BUFFER_RECT_ERR);
7256 
7257  if (event != NULL && err == CL_SUCCESS)
7258  *event = tmp;
7259 
7260  return err;
7261  }
7262 
7263  cl_int enqueueWriteBufferRect(
7264  const Buffer& buffer,
7265  cl_bool blocking,
7266  const array<size_type, 3>& buffer_offset,
7267  const array<size_type, 3>& host_offset,
7268  const array<size_type, 3>& region,
7269  size_type buffer_row_pitch,
7270  size_type buffer_slice_pitch,
7271  size_type host_row_pitch,
7272  size_type host_slice_pitch,
7273  const void *ptr,
7274  const vector<Event>* events = NULL,
7275  Event* event = NULL) const
7276  {
7277  cl_event tmp;
7278  cl_int err = detail::errHandler(
7279  ::clEnqueueWriteBufferRect(
7280  object_,
7281  buffer(),
7282  blocking,
7283  buffer_offset.data(),
7284  host_offset.data(),
7285  region.data(),
7286  buffer_row_pitch,
7287  buffer_slice_pitch,
7288  host_row_pitch,
7289  host_slice_pitch,
7290  ptr,
7291  (events != NULL) ? (cl_uint) events->size() : 0,
7292  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7293  (event != NULL) ? &tmp : NULL),
7294  __ENQUEUE_WRITE_BUFFER_RECT_ERR);
7295 
7296  if (event != NULL && err == CL_SUCCESS)
7297  *event = tmp;
7298 
7299  return err;
7300  }
7301 
7302  cl_int enqueueCopyBufferRect(
7303  const Buffer& src,
7304  const Buffer& dst,
7305  const array<size_type, 3>& src_origin,
7306  const array<size_type, 3>& dst_origin,
7307  const array<size_type, 3>& region,
7308  size_type src_row_pitch,
7309  size_type src_slice_pitch,
7310  size_type dst_row_pitch,
7311  size_type dst_slice_pitch,
7312  const vector<Event>* events = NULL,
7313  Event* event = NULL) const
7314  {
7315  cl_event tmp;
7316  cl_int err = detail::errHandler(
7317  ::clEnqueueCopyBufferRect(
7318  object_,
7319  src(),
7320  dst(),
7321  src_origin.data(),
7322  dst_origin.data(),
7323  region.data(),
7324  src_row_pitch,
7325  src_slice_pitch,
7326  dst_row_pitch,
7327  dst_slice_pitch,
7328  (events != NULL) ? (cl_uint) events->size() : 0,
7329  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7330  (event != NULL) ? &tmp : NULL),
7331  __ENQEUE_COPY_BUFFER_RECT_ERR);
7332 
7333  if (event != NULL && err == CL_SUCCESS)
7334  *event = tmp;
7335 
7336  return err;
7337  }
7338 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
7339 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7340 
7351  template<typename PatternType>
7352  cl_int enqueueFillBuffer(
7353  const Buffer& buffer,
7354  PatternType pattern,
7355  size_type offset,
7356  size_type size,
7357  const vector<Event>* events = NULL,
7358  Event* event = NULL) const
7359  {
7360  cl_event tmp;
7361  cl_int err = detail::errHandler(
7362  ::clEnqueueFillBuffer(
7363  object_,
7364  buffer(),
7365  static_cast<void*>(&pattern),
7366  sizeof(PatternType),
7367  offset,
7368  size,
7369  (events != NULL) ? (cl_uint) events->size() : 0,
7370  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7371  (event != NULL) ? &tmp : NULL),
7372  __ENQUEUE_FILL_BUFFER_ERR);
7373 
7374  if (event != NULL && err == CL_SUCCESS)
7375  *event = tmp;
7376 
7377  return err;
7378  }
7379 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7380 
7381  cl_int enqueueReadImage(
7382  const Image& image,
7383  cl_bool blocking,
7384  const array<size_type, 3>& origin,
7385  const array<size_type, 3>& region,
7386  size_type row_pitch,
7387  size_type slice_pitch,
7388  void* ptr,
7389  const vector<Event>* events = NULL,
7390  Event* event = NULL) const
7391  {
7392  cl_event tmp;
7393  cl_int err = detail::errHandler(
7394  ::clEnqueueReadImage(
7395  object_,
7396  image(),
7397  blocking,
7398  origin.data(),
7399  region.data(),
7400  row_pitch,
7401  slice_pitch,
7402  ptr,
7403  (events != NULL) ? (cl_uint) events->size() : 0,
7404  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7405  (event != NULL) ? &tmp : NULL),
7406  __ENQUEUE_READ_IMAGE_ERR);
7407 
7408  if (event != NULL && err == CL_SUCCESS)
7409  *event = tmp;
7410 
7411  return err;
7412  }
7413 
7414  cl_int enqueueWriteImage(
7415  const Image& image,
7416  cl_bool blocking,
7417  const array<size_type, 3>& origin,
7418  const array<size_type, 3>& region,
7419  size_type row_pitch,
7420  size_type slice_pitch,
7421  const void* ptr,
7422  const vector<Event>* events = NULL,
7423  Event* event = NULL) const
7424  {
7425  cl_event tmp;
7426  cl_int err = detail::errHandler(
7427  ::clEnqueueWriteImage(
7428  object_,
7429  image(),
7430  blocking,
7431  origin.data(),
7432  region.data(),
7433  row_pitch,
7434  slice_pitch,
7435  ptr,
7436  (events != NULL) ? (cl_uint) events->size() : 0,
7437  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7438  (event != NULL) ? &tmp : NULL),
7439  __ENQUEUE_WRITE_IMAGE_ERR);
7440 
7441  if (event != NULL && err == CL_SUCCESS)
7442  *event = tmp;
7443 
7444  return err;
7445  }
7446 
7447  cl_int enqueueCopyImage(
7448  const Image& src,
7449  const Image& dst,
7450  const array<size_type, 3>& src_origin,
7451  const array<size_type, 3>& dst_origin,
7452  const array<size_type, 3>& region,
7453  const vector<Event>* events = NULL,
7454  Event* event = NULL) const
7455  {
7456  cl_event tmp;
7457  cl_int err = detail::errHandler(
7458  ::clEnqueueCopyImage(
7459  object_,
7460  src(),
7461  dst(),
7462  src_origin.data(),
7463  dst_origin.data(),
7464  region.data(),
7465  (events != NULL) ? (cl_uint) events->size() : 0,
7466  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7467  (event != NULL) ? &tmp : NULL),
7468  __ENQUEUE_COPY_IMAGE_ERR);
7469 
7470  if (event != NULL && err == CL_SUCCESS)
7471  *event = tmp;
7472 
7473  return err;
7474  }
7475 
7476 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7477 
7484  cl_int enqueueFillImage(
7485  const Image& image,
7486  cl_float4 fillColor,
7487  const array<size_type, 3>& origin,
7488  const array<size_type, 3>& region,
7489  const vector<Event>* events = NULL,
7490  Event* event = NULL) const
7491  {
7492  cl_event tmp;
7493  cl_int err = detail::errHandler(
7494  ::clEnqueueFillImage(
7495  object_,
7496  image(),
7497  static_cast<void*>(&fillColor),
7498  origin.data(),
7499  region.data(),
7500  (events != NULL) ? (cl_uint) events->size() : 0,
7501  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7502  (event != NULL) ? &tmp : NULL),
7503  __ENQUEUE_FILL_IMAGE_ERR);
7504 
7505  if (event != NULL && err == CL_SUCCESS)
7506  *event = tmp;
7507 
7508  return err;
7509  }
7510 
7518  cl_int enqueueFillImage(
7519  const Image& image,
7520  cl_int4 fillColor,
7521  const array<size_type, 3>& origin,
7522  const array<size_type, 3>& region,
7523  const vector<Event>* events = NULL,
7524  Event* event = NULL) const
7525  {
7526  cl_event tmp;
7527  cl_int err = detail::errHandler(
7528  ::clEnqueueFillImage(
7529  object_,
7530  image(),
7531  static_cast<void*>(&fillColor),
7532  origin.data(),
7533  region.data(),
7534  (events != NULL) ? (cl_uint) events->size() : 0,
7535  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7536  (event != NULL) ? &tmp : NULL),
7537  __ENQUEUE_FILL_IMAGE_ERR);
7538 
7539  if (event != NULL && err == CL_SUCCESS)
7540  *event = tmp;
7541 
7542  return err;
7543  }
7544 
7552  cl_int enqueueFillImage(
7553  const Image& image,
7554  cl_uint4 fillColor,
7555  const array<size_type, 3>& origin,
7556  const array<size_type, 3>& region,
7557  const vector<Event>* events = NULL,
7558  Event* event = NULL) const
7559  {
7560  cl_event tmp;
7561  cl_int err = detail::errHandler(
7562  ::clEnqueueFillImage(
7563  object_,
7564  image(),
7565  static_cast<void*>(&fillColor),
7566  origin.data(),
7567  region.data(),
7568  (events != NULL) ? (cl_uint) events->size() : 0,
7569  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7570  (event != NULL) ? &tmp : NULL),
7571  __ENQUEUE_FILL_IMAGE_ERR);
7572 
7573  if (event != NULL && err == CL_SUCCESS)
7574  *event = tmp;
7575 
7576  return err;
7577  }
7578 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7579 
7580  cl_int enqueueCopyImageToBuffer(
7581  const Image& src,
7582  const Buffer& dst,
7583  const array<size_type, 3>& src_origin,
7584  const array<size_type, 3>& region,
7585  size_type dst_offset,
7586  const vector<Event>* events = NULL,
7587  Event* event = NULL) const
7588  {
7589  cl_event tmp;
7590  cl_int err = detail::errHandler(
7591  ::clEnqueueCopyImageToBuffer(
7592  object_,
7593  src(),
7594  dst(),
7595  src_origin.data(),
7596  region.data(),
7597  dst_offset,
7598  (events != NULL) ? (cl_uint) events->size() : 0,
7599  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7600  (event != NULL) ? &tmp : NULL),
7601  __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR);
7602 
7603  if (event != NULL && err == CL_SUCCESS)
7604  *event = tmp;
7605 
7606  return err;
7607  }
7608 
7609  cl_int enqueueCopyBufferToImage(
7610  const Buffer& src,
7611  const Image& dst,
7612  size_type src_offset,
7613  const array<size_type, 3>& dst_origin,
7614  const array<size_type, 3>& region,
7615  const vector<Event>* events = NULL,
7616  Event* event = NULL) const
7617  {
7618  cl_event tmp;
7619  cl_int err = detail::errHandler(
7620  ::clEnqueueCopyBufferToImage(
7621  object_,
7622  src(),
7623  dst(),
7624  src_offset,
7625  dst_origin.data(),
7626  region.data(),
7627  (events != NULL) ? (cl_uint) events->size() : 0,
7628  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7629  (event != NULL) ? &tmp : NULL),
7630  __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR);
7631 
7632  if (event != NULL && err == CL_SUCCESS)
7633  *event = tmp;
7634 
7635  return err;
7636  }
7637 
7638  void* enqueueMapBuffer(
7639  const Buffer& buffer,
7640  cl_bool blocking,
7641  cl_map_flags flags,
7642  size_type offset,
7643  size_type size,
7644  const vector<Event>* events = NULL,
7645  Event* event = NULL,
7646  cl_int* err = NULL) const
7647  {
7648  cl_event tmp;
7649  cl_int error;
7650  void * result = ::clEnqueueMapBuffer(
7651  object_, buffer(), blocking, flags, offset, size,
7652  (events != NULL) ? (cl_uint) events->size() : 0,
7653  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7654  (event != NULL) ? &tmp : NULL,
7655  &error);
7656 
7657  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
7658  if (err != NULL) {
7659  *err = error;
7660  }
7661  if (event != NULL && error == CL_SUCCESS)
7662  *event = tmp;
7663 
7664  return result;
7665  }
7666 
7667  void* enqueueMapImage(
7668  const Image& buffer,
7669  cl_bool blocking,
7670  cl_map_flags flags,
7671  const array<size_type, 3>& origin,
7672  const array<size_type, 3>& region,
7673  size_type * row_pitch,
7674  size_type * slice_pitch,
7675  const vector<Event>* events = NULL,
7676  Event* event = NULL,
7677  cl_int* err = NULL) const
7678  {
7679  cl_event tmp;
7680  cl_int error;
7681  void * result = ::clEnqueueMapImage(
7682  object_, buffer(), blocking, flags,
7683  origin.data(),
7684  region.data(),
7685  row_pitch, slice_pitch,
7686  (events != NULL) ? (cl_uint) events->size() : 0,
7687  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7688  (event != NULL) ? &tmp : NULL,
7689  &error);
7690 
7691  detail::errHandler(error, __ENQUEUE_MAP_IMAGE_ERR);
7692  if (err != NULL) {
7693  *err = error;
7694  }
7695  if (event != NULL && error == CL_SUCCESS)
7696  *event = tmp;
7697  return result;
7698  }
7699 
7700 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7701 
7705  template<typename T>
7706  cl_int enqueueMapSVM(
7707  T* ptr,
7708  cl_bool blocking,
7709  cl_map_flags flags,
7710  size_type size,
7711  const vector<Event>* events = NULL,
7712  Event* event = NULL) const
7713  {
7714  cl_event tmp;
7715  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7716  object_, blocking, flags, static_cast<void*>(ptr), size,
7717  (events != NULL) ? (cl_uint)events->size() : 0,
7718  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7719  (event != NULL) ? &tmp : NULL),
7720  __ENQUEUE_MAP_BUFFER_ERR);
7721 
7722  if (event != NULL && err == CL_SUCCESS)
7723  *event = tmp;
7724 
7725  return err;
7726  }
7727 
7728 
7733  template<typename T, class D>
7734  cl_int enqueueMapSVM(
7735  cl::pointer<T, D> &ptr,
7736  cl_bool blocking,
7737  cl_map_flags flags,
7738  size_type size,
7739  const vector<Event>* events = NULL,
7740  Event* event = NULL) const
7741  {
7742  cl_event tmp;
7743  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7744  object_, blocking, flags, static_cast<void*>(ptr.get()), size,
7745  (events != NULL) ? (cl_uint)events->size() : 0,
7746  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7747  (event != NULL) ? &tmp : NULL),
7748  __ENQUEUE_MAP_BUFFER_ERR);
7749 
7750  if (event != NULL && err == CL_SUCCESS)
7751  *event = tmp;
7752 
7753  return err;
7754  }
7755 
7760  template<typename T, class Alloc>
7761  cl_int enqueueMapSVM(
7762  cl::vector<T, Alloc> &container,
7763  cl_bool blocking,
7764  cl_map_flags flags,
7765  const vector<Event>* events = NULL,
7766  Event* event = NULL) const
7767  {
7768  cl_event tmp;
7769  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7770  object_, blocking, flags, static_cast<void*>(container.data()), container.size(),
7771  (events != NULL) ? (cl_uint)events->size() : 0,
7772  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7773  (event != NULL) ? &tmp : NULL),
7774  __ENQUEUE_MAP_BUFFER_ERR);
7775 
7776  if (event != NULL && err == CL_SUCCESS)
7777  *event = tmp;
7778 
7779  return err;
7780  }
7781 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7782 
7783  cl_int enqueueUnmapMemObject(
7784  const Memory& memory,
7785  void* mapped_ptr,
7786  const vector<Event>* events = NULL,
7787  Event* event = NULL) const
7788  {
7789  cl_event tmp;
7790  cl_int err = detail::errHandler(
7791  ::clEnqueueUnmapMemObject(
7792  object_, memory(), mapped_ptr,
7793  (events != NULL) ? (cl_uint) events->size() : 0,
7794  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7795  (event != NULL) ? &tmp : NULL),
7796  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7797 
7798  if (event != NULL && err == CL_SUCCESS)
7799  *event = tmp;
7800 
7801  return err;
7802  }
7803 
7804 
7805 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7806 
7810  template<typename T>
7811  cl_int enqueueUnmapSVM(
7812  T* ptr,
7813  const vector<Event>* events = NULL,
7814  Event* event = NULL) const
7815  {
7816  cl_event tmp;
7817  cl_int err = detail::errHandler(
7818  ::clEnqueueSVMUnmap(
7819  object_, static_cast<void*>(ptr),
7820  (events != NULL) ? (cl_uint)events->size() : 0,
7821  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7822  (event != NULL) ? &tmp : NULL),
7823  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7824 
7825  if (event != NULL && err == CL_SUCCESS)
7826  *event = tmp;
7827 
7828  return err;
7829  }
7830 
7835  template<typename T, class D>
7836  cl_int enqueueUnmapSVM(
7837  cl::pointer<T, D> &ptr,
7838  const vector<Event>* events = NULL,
7839  Event* event = NULL) const
7840  {
7841  cl_event tmp;
7842  cl_int err = detail::errHandler(
7843  ::clEnqueueSVMUnmap(
7844  object_, static_cast<void*>(ptr.get()),
7845  (events != NULL) ? (cl_uint)events->size() : 0,
7846  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7847  (event != NULL) ? &tmp : NULL),
7848  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7849 
7850  if (event != NULL && err == CL_SUCCESS)
7851  *event = tmp;
7852 
7853  return err;
7854  }
7855 
7860  template<typename T, class Alloc>
7861  cl_int enqueueUnmapSVM(
7862  cl::vector<T, Alloc> &container,
7863  const vector<Event>* events = NULL,
7864  Event* event = NULL) const
7865  {
7866  cl_event tmp;
7867  cl_int err = detail::errHandler(
7868  ::clEnqueueSVMUnmap(
7869  object_, static_cast<void*>(container.data()),
7870  (events != NULL) ? (cl_uint)events->size() : 0,
7871  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7872  (event != NULL) ? &tmp : NULL),
7873  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7874 
7875  if (event != NULL && err == CL_SUCCESS)
7876  *event = tmp;
7877 
7878  return err;
7879  }
7880 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7881 
7882 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7883 
7894  cl_int enqueueMarkerWithWaitList(
7895  const vector<Event> *events = 0,
7896  Event *event = 0) const
7897  {
7898  cl_event tmp;
7899  cl_int err = detail::errHandler(
7900  ::clEnqueueMarkerWithWaitList(
7901  object_,
7902  (events != NULL) ? (cl_uint) events->size() : 0,
7903  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7904  (event != NULL) ? &tmp : NULL),
7905  __ENQUEUE_MARKER_WAIT_LIST_ERR);
7906 
7907  if (event != NULL && err == CL_SUCCESS)
7908  *event = tmp;
7909 
7910  return err;
7911  }
7912 
7924  cl_int enqueueBarrierWithWaitList(
7925  const vector<Event> *events = 0,
7926  Event *event = 0) const
7927  {
7928  cl_event tmp;
7929  cl_int err = detail::errHandler(
7930  ::clEnqueueBarrierWithWaitList(
7931  object_,
7932  (events != NULL) ? (cl_uint) events->size() : 0,
7933  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7934  (event != NULL) ? &tmp : NULL),
7935  __ENQUEUE_BARRIER_WAIT_LIST_ERR);
7936 
7937  if (event != NULL && err == CL_SUCCESS)
7938  *event = tmp;
7939 
7940  return err;
7941  }
7942 
7947  cl_int enqueueMigrateMemObjects(
7948  const vector<Memory> &memObjects,
7949  cl_mem_migration_flags flags,
7950  const vector<Event>* events = NULL,
7951  Event* event = NULL
7952  ) const
7953  {
7954  cl_event tmp;
7955 
7956  vector<cl_mem> localMemObjects(memObjects.size());
7957 
7958  for( int i = 0; i < (int)memObjects.size(); ++i ) {
7959  localMemObjects[i] = memObjects[i]();
7960  }
7961 
7962 
7963  cl_int err = detail::errHandler(
7964  ::clEnqueueMigrateMemObjects(
7965  object_,
7966  (cl_uint)memObjects.size(),
7967  localMemObjects.data(),
7968  flags,
7969  (events != NULL) ? (cl_uint) events->size() : 0,
7970  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7971  (event != NULL) ? &tmp : NULL),
7972  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7973 
7974  if (event != NULL && err == CL_SUCCESS)
7975  *event = tmp;
7976 
7977  return err;
7978  }
7979 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7980 
7981  cl_int enqueueNDRangeKernel(
7982  const Kernel& kernel,
7983  const NDRange& offset,
7984  const NDRange& global,
7985  const NDRange& local = NullRange,
7986  const vector<Event>* events = NULL,
7987  Event* event = NULL) const
7988  {
7989  cl_event tmp;
7990  cl_int err = detail::errHandler(
7991  ::clEnqueueNDRangeKernel(
7992  object_, kernel(), (cl_uint) global.dimensions(),
7993  offset.dimensions() != 0 ? (const size_type*) offset : NULL,
7994  (const size_type*) global,
7995  local.dimensions() != 0 ? (const size_type*) local : NULL,
7996  (events != NULL) ? (cl_uint) events->size() : 0,
7997  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7998  (event != NULL) ? &tmp : NULL),
7999  __ENQUEUE_NDRANGE_KERNEL_ERR);
8000 
8001  if (event != NULL && err == CL_SUCCESS)
8002  *event = tmp;
8003 
8004  return err;
8005  }
8006 
8007 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8008  CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int enqueueTask(
8009  const Kernel& kernel,
8010  const vector<Event>* events = NULL,
8011  Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
8012  {
8013  cl_event tmp;
8014  cl_int err = detail::errHandler(
8015  ::clEnqueueTask(
8016  object_, kernel(),
8017  (events != NULL) ? (cl_uint) events->size() : 0,
8018  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8019  (event != NULL) ? &tmp : NULL),
8020  __ENQUEUE_TASK_ERR);
8021 
8022  if (event != NULL && err == CL_SUCCESS)
8023  *event = tmp;
8024 
8025  return err;
8026  }
8027 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8028 
8029  cl_int enqueueNativeKernel(
8030  void (CL_CALLBACK *userFptr)(void *),
8031  std::pair<void*, size_type> args,
8032  const vector<Memory>* mem_objects = NULL,
8033  const vector<const void*>* mem_locs = NULL,
8034  const vector<Event>* events = NULL,
8035  Event* event = NULL) const
8036  {
8037  size_type elements = 0;
8038  if (mem_objects != NULL) {
8039  elements = mem_objects->size();
8040  }
8041  vector<cl_mem> mems(elements);
8042  for (unsigned int i = 0; i < elements; i++) {
8043  mems[i] = ((*mem_objects)[i])();
8044  }
8045 
8046  cl_event tmp;
8047  cl_int err = detail::errHandler(
8048  ::clEnqueueNativeKernel(
8049  object_, userFptr, args.first, args.second,
8050  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8051  mems.data(),
8052  (mem_locs != NULL && mem_locs->size() > 0) ? (const void **) &mem_locs->front() : NULL,
8053  (events != NULL) ? (cl_uint) events->size() : 0,
8054  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8055  (event != NULL) ? &tmp : NULL),
8056  __ENQUEUE_NATIVE_KERNEL);
8057 
8058  if (event != NULL && err == CL_SUCCESS)
8059  *event = tmp;
8060 
8061  return err;
8062  }
8063 
8067 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8068  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8069  cl_int enqueueMarker(Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8070  {
8071  cl_event tmp;
8072  cl_int err = detail::errHandler(
8073  ::clEnqueueMarker(
8074  object_,
8075  (event != NULL) ? &tmp : NULL),
8076  __ENQUEUE_MARKER_ERR);
8077 
8078  if (event != NULL && err == CL_SUCCESS)
8079  *event = tmp;
8080 
8081  return err;
8082  }
8083 
8084  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8085  cl_int enqueueWaitForEvents(const vector<Event>& events) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8086  {
8087  return detail::errHandler(
8088  ::clEnqueueWaitForEvents(
8089  object_,
8090  (cl_uint) events.size(),
8091  events.size() > 0 ? (const cl_event*) &events.front() : NULL),
8092  __ENQUEUE_WAIT_FOR_EVENTS_ERR);
8093  }
8094 #endif // defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8095 
8096  cl_int enqueueAcquireGLObjects(
8097  const vector<Memory>* mem_objects = NULL,
8098  const vector<Event>* events = NULL,
8099  Event* event = NULL) const
8100  {
8101  cl_event tmp;
8102  cl_int err = detail::errHandler(
8103  ::clEnqueueAcquireGLObjects(
8104  object_,
8105  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8106  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8107  (events != NULL) ? (cl_uint) events->size() : 0,
8108  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8109  (event != NULL) ? &tmp : NULL),
8110  __ENQUEUE_ACQUIRE_GL_ERR);
8111 
8112  if (event != NULL && err == CL_SUCCESS)
8113  *event = tmp;
8114 
8115  return err;
8116  }
8117 
8118  cl_int enqueueReleaseGLObjects(
8119  const vector<Memory>* mem_objects = NULL,
8120  const vector<Event>* events = NULL,
8121  Event* event = NULL) const
8122  {
8123  cl_event tmp;
8124  cl_int err = detail::errHandler(
8125  ::clEnqueueReleaseGLObjects(
8126  object_,
8127  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8128  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8129  (events != NULL) ? (cl_uint) events->size() : 0,
8130  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8131  (event != NULL) ? &tmp : NULL),
8132  __ENQUEUE_RELEASE_GL_ERR);
8133 
8134  if (event != NULL && err == CL_SUCCESS)
8135  *event = tmp;
8136 
8137  return err;
8138  }
8139 
8140 #if defined (CL_HPP_USE_DX_INTEROP)
8141 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueAcquireD3D10ObjectsKHR)(
8142  cl_command_queue command_queue, cl_uint num_objects,
8143  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8144  const cl_event* event_wait_list, cl_event* event);
8145 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueReleaseD3D10ObjectsKHR)(
8146  cl_command_queue command_queue, cl_uint num_objects,
8147  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8148  const cl_event* event_wait_list, cl_event* event);
8149 
8150  cl_int enqueueAcquireD3D10Objects(
8151  const vector<Memory>* mem_objects = NULL,
8152  const vector<Event>* events = NULL,
8153  Event* event = NULL) const
8154  {
8155  static PFN_clEnqueueAcquireD3D10ObjectsKHR pfn_clEnqueueAcquireD3D10ObjectsKHR = NULL;
8156 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8157  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8158  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8159  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8160  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueAcquireD3D10ObjectsKHR);
8161 #endif
8162 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8163  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueAcquireD3D10ObjectsKHR);
8164 #endif
8165 
8166  cl_event tmp;
8167  cl_int err = detail::errHandler(
8168  pfn_clEnqueueAcquireD3D10ObjectsKHR(
8169  object_,
8170  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8171  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8172  (events != NULL) ? (cl_uint) events->size() : 0,
8173  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8174  (event != NULL) ? &tmp : NULL),
8175  __ENQUEUE_ACQUIRE_GL_ERR);
8176 
8177  if (event != NULL && err == CL_SUCCESS)
8178  *event = tmp;
8179 
8180  return err;
8181  }
8182 
8183  cl_int enqueueReleaseD3D10Objects(
8184  const vector<Memory>* mem_objects = NULL,
8185  const vector<Event>* events = NULL,
8186  Event* event = NULL) const
8187  {
8188  static PFN_clEnqueueReleaseD3D10ObjectsKHR pfn_clEnqueueReleaseD3D10ObjectsKHR = NULL;
8189 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8190  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8191  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8192  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8193  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueReleaseD3D10ObjectsKHR);
8194 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8195 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8196  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueReleaseD3D10ObjectsKHR);
8197 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
8198 
8199  cl_event tmp;
8200  cl_int err = detail::errHandler(
8201  pfn_clEnqueueReleaseD3D10ObjectsKHR(
8202  object_,
8203  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8204  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8205  (events != NULL) ? (cl_uint) events->size() : 0,
8206  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8207  (event != NULL) ? &tmp : NULL),
8208  __ENQUEUE_RELEASE_GL_ERR);
8209 
8210  if (event != NULL && err == CL_SUCCESS)
8211  *event = tmp;
8212 
8213  return err;
8214  }
8215 #endif
8216 
8220 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8221  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8222  cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8223  {
8224  return detail::errHandler(
8225  ::clEnqueueBarrier(object_),
8226  __ENQUEUE_BARRIER_ERR);
8227  }
8228 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
8229 
8230  cl_int flush() const
8231  {
8232  return detail::errHandler(::clFlush(object_), __FLUSH_ERR);
8233  }
8234 
8235  cl_int finish() const
8236  {
8237  return detail::errHandler(::clFinish(object_), __FINISH_ERR);
8238  }
8239 }; // CommandQueue
8240 
8241 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag CommandQueue::default_initialized_;
8242 CL_HPP_DEFINE_STATIC_MEMBER_ CommandQueue CommandQueue::default_;
8243 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int CommandQueue::default_error_ = CL_SUCCESS;
8244 
8245 
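
The CommandQueue methods above are thin wrappers over the corresponding clEnqueue* entry points: each forwards an optional wait list and returns the completion event through its last parameter. A minimal host-side sketch of the usual write/launch/read pattern, assuming a default context and device and a cl::Program named program that contains a kernel called "vadd" (both are illustrative assumptions), with error checking elided:

// Sketch only: 'program' and the kernel name "vadd" are assumptions.
std::vector<float> a(1024, 1.0f), b(1024, 2.0f), c(1024);

cl::CommandQueue queue = cl::CommandQueue::getDefault();
cl::Buffer bufA(CL_MEM_READ_ONLY,  sizeof(float) * a.size());
cl::Buffer bufB(CL_MEM_READ_ONLY,  sizeof(float) * b.size());
cl::Buffer bufC(CL_MEM_WRITE_ONLY, sizeof(float) * c.size());

queue.enqueueWriteBuffer(bufA, CL_TRUE, 0, sizeof(float) * a.size(), a.data());
queue.enqueueWriteBuffer(bufB, CL_TRUE, 0, sizeof(float) * b.size(), b.data());

cl::Kernel vadd(program, "vadd");
vadd.setArg(0, bufA);
vadd.setArg(1, bufB);
vadd.setArg(2, bufC);

cl::Event done;
queue.enqueueNDRangeKernel(vadd, cl::NullRange, cl::NDRange(a.size()),
                           cl::NullRange, NULL, &done);

std::vector<cl::Event> wait{ done };
queue.enqueueReadBuffer(bufC, CL_TRUE, 0, sizeof(float) * c.size(), c.data(), &wait);
queue.finish();
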
8246 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8247 enum class DeviceQueueProperties : cl_command_queue_properties
8248 {
8249  None = 0,
8250  Profiling = CL_QUEUE_PROFILING_ENABLE,
8251 };
8252 
8253 inline DeviceQueueProperties operator|(DeviceQueueProperties lhs, DeviceQueueProperties rhs)
8254 {
8255  return static_cast<DeviceQueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
8256 }
8257 
8261 class DeviceCommandQueue : public detail::Wrapper<cl_command_queue>
8262 {
8263 public:
8264 
8269  DeviceCommandQueue() { }
8273  DeviceCommandQueue(DeviceQueueProperties properties, cl_int* err = NULL)
8274  {
8275  cl_int error;
8276  cl::Context context = cl::Context::getDefault();
8277  cl::Device device = cl::Device::getDefault();
8278 
8279  cl_command_queue_properties mergedProperties =
8280  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8281 
8282  cl_queue_properties queue_properties[] = {
8283  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8284  object_ = ::clCreateCommandQueueWithProperties(
8285  context(), device(), queue_properties, &error);
8286 
8287  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8288  if (err != NULL) {
8289  *err = error;
8290  }
8291  }
8292 
8296  DeviceCommandQueue(
8297  const Context& context,
8298  const Device& device,
8299  DeviceQueueProperties properties = DeviceQueueProperties::None,
8300  cl_int* err = NULL)
8301  {
8302  cl_int error;
8303 
8304  cl_command_queue_properties mergedProperties =
8305  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8306  cl_queue_properties queue_properties[] = {
8307  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8308  object_ = ::clCreateCommandQueueWithProperties(
8309  context(), device(), queue_properties, &error);
8310 
8311  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8312  if (err != NULL) {
8313  *err = error;
8314  }
8315  }
8316 
8320  DeviceCommandQueue(
8321  const Context& context,
8322  const Device& device,
8323  cl_uint queueSize,
8324  DeviceQueueProperties properties = DeviceQueueProperties::None,
8325  cl_int* err = NULL)
8326  {
8327  cl_int error;
8328 
8329  cl_command_queue_properties mergedProperties =
8330  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8331  cl_queue_properties queue_properties[] = {
8332  CL_QUEUE_PROPERTIES, mergedProperties,
8333  CL_QUEUE_SIZE, queueSize,
8334  0 };
8335  object_ = ::clCreateCommandQueueWithProperties(
8336  context(), device(), queue_properties, &error);
8337 
8338  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8339  if (err != NULL) {
8340  *err = error;
8341  }
8342  }
8343 
8350  explicit DeviceCommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
8351  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
8352 
8353  DeviceCommandQueue& operator = (const cl_command_queue& rhs)
8354  {
8355  detail::Wrapper<cl_type>::operator=(rhs);
8356  return *this;
8357  }
8358 
8362  DeviceCommandQueue(const DeviceCommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
8363 
8367  DeviceCommandQueue& operator = (const DeviceCommandQueue &queue)
8368  {
8369  detail::Wrapper<cl_type>::operator=(queue);
8370  return *this;
8371  }
8372 
8376  DeviceCommandQueue(DeviceCommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
8377 
8381  DeviceCommandQueue& operator = (DeviceCommandQueue &&queue) CL_HPP_NOEXCEPT_
8382  {
8383  detail::Wrapper<cl_type>::operator=(std::move(queue));
8384  return *this;
8385  }
8386 
8387  template <typename T>
8388  cl_int getInfo(cl_command_queue_info name, T* param) const
8389  {
8390  return detail::errHandler(
8391  detail::getInfo(
8392  &::clGetCommandQueueInfo, object_, name, param),
8393  __GET_COMMAND_QUEUE_INFO_ERR);
8394  }
8395 
8396  template <cl_int name> typename
8397  detail::param_traits<detail::cl_command_queue_info, name>::param_type
8398  getInfo(cl_int* err = NULL) const
8399  {
8400  typename detail::param_traits<
8401  detail::cl_command_queue_info, name>::param_type param;
8402  cl_int result = getInfo(name, &param);
8403  if (err != NULL) {
8404  *err = result;
8405  }
8406  return param;
8407  }
8408 
8415  static DeviceCommandQueue makeDefault(
8416  cl_int *err = nullptr)
8417  {
8418  cl_int error;
8419  cl::Context context = cl::Context::getDefault();
8420  cl::Device device = cl::Device::getDefault();
8421 
8422  cl_command_queue_properties properties =
8423  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8424  cl_queue_properties queue_properties[] = {
8425  CL_QUEUE_PROPERTIES, properties,
8426  0 };
8427  DeviceCommandQueue deviceQueue(
8428  ::clCreateCommandQueueWithProperties(
8429  context(), device(), queue_properties, &error));
8430 
8431  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8432  if (err != NULL) {
8433  *err = error;
8434  }
8435 
8436  return deviceQueue;
8437  }
8438 
8445  static DeviceCommandQueue makeDefault(
8446  const Context &context, const Device &device, cl_int *err = nullptr)
8447  {
8448  cl_int error;
8449 
8450  cl_command_queue_properties properties =
8451  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8452  cl_queue_properties queue_properties[] = {
8453  CL_QUEUE_PROPERTIES, properties,
8454  0 };
8455  DeviceCommandQueue deviceQueue(
8456  ::clCreateCommandQueueWithProperties(
8457  context(), device(), queue_properties, &error));
8458 
8459  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8460  if (err != NULL) {
8461  *err = error;
8462  }
8463 
8464  return deviceQueue;
8465  }
8466 
8473  static DeviceCommandQueue makeDefault(
8474  const Context &context, const Device &device, cl_uint queueSize, cl_int *err = nullptr)
8475  {
8476  cl_int error;
8477 
8478  cl_command_queue_properties properties =
8479  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8480  cl_queue_properties queue_properties[] = {
8481  CL_QUEUE_PROPERTIES, properties,
8482  CL_QUEUE_SIZE, queueSize,
8483  0 };
8484  DeviceCommandQueue deviceQueue(
8485  ::clCreateCommandQueueWithProperties(
8486  context(), device(), queue_properties, &error));
8487 
8488  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8489  if (err != NULL) {
8490  *err = error;
8491  }
8492 
8493  return deviceQueue;
8494  }
8495 }; // DeviceCommandQueue
8496 
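
A device command queue is consumed only by device-side enqueue_kernel; the host merely creates it. A sketch assuming an OpenCL 2.0 capable default device (the queue size and property choices are illustrative):

cl_int err = CL_SUCCESS;
cl::Context context = cl::Context::getDefault();
cl::Device  device  = cl::Device::getDefault();

// Default on-device queue, reachable from kernels via get_default_queue().
cl::DeviceCommandQueue defaultQueue =
    cl::DeviceCommandQueue::makeDefault(context, device, &err);

// Additional on-device queue with an explicit size and profiling enabled.
cl::DeviceCommandQueue extraQueue(
    context, device, 16 * 1024, cl::DeviceQueueProperties::Profiling, &err);
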
8497 namespace detail
8498 {
8499  // Specialization for device command queue
8500  template <>
8501  struct KernelArgumentHandler<cl::DeviceCommandQueue, void>
8502  {
8503  static size_type size(const cl::DeviceCommandQueue&) { return sizeof(cl_command_queue); }
8504  static const cl_command_queue* ptr(const cl::DeviceCommandQueue& value) { return &(value()); }
8505  };
8506 } // namespace detail
8507 
8508 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8509 
8510 
8511 template< typename IteratorType >
8512  Buffer::Buffer(
8513  const Context &context,
8514  IteratorType startIterator,
8515  IteratorType endIterator,
8516  bool readOnly,
8517  bool useHostPtr,
8518  cl_int* err)
8519 {
8520  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8521  cl_int error;
8522 
8523  cl_mem_flags flags = 0;
8524  if( readOnly ) {
8525  flags |= CL_MEM_READ_ONLY;
8526  }
8527  else {
8528  flags |= CL_MEM_READ_WRITE;
8529  }
8530  if( useHostPtr ) {
8531  flags |= CL_MEM_USE_HOST_PTR;
8532  }
8533 
8534  size_type size = sizeof(DataType)*(endIterator - startIterator);
8535 
8536  if( useHostPtr ) {
8537  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
8538  } else {
8539  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
8540  }
8541 
8542  detail::errHandler(error, __CREATE_BUFFER_ERR);
8543  if (err != NULL) {
8544  *err = error;
8545  }
8546 
8547  if( !useHostPtr ) {
8548  CommandQueue queue(context, 0, &error);
8549  detail::errHandler(error, __CREATE_BUFFER_ERR);
8550  if (err != NULL) {
8551  *err = error;
8552  }
8553 
8554  error = cl::copy(queue, startIterator, endIterator, *this);
8555  detail::errHandler(error, __CREATE_BUFFER_ERR);
8556  if (err != NULL) {
8557  *err = error;
8558  }
8559  }
8560 }
8561 
8562 template< typename IteratorType >
8563  Buffer::Buffer(
8564  const CommandQueue &queue,
8565  IteratorType startIterator,
8566  IteratorType endIterator,
8567  bool readOnly,
8568  bool useHostPtr,
8569  cl_int* err)
8570 {
8571  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8572  cl_int error;
8573 
8574  cl_mem_flags flags = 0;
8575  if (readOnly) {
8576  flags |= CL_MEM_READ_ONLY;
8577  }
8578  else {
8579  flags |= CL_MEM_READ_WRITE;
8580  }
8581  if (useHostPtr) {
8582  flags |= CL_MEM_USE_HOST_PTR;
8583  }
8584 
8585  size_type size = sizeof(DataType)*(endIterator - startIterator);
8586 
8587  Context context = queue.getInfo<CL_QUEUE_CONTEXT>();
8588 
8589  if (useHostPtr) {
8590  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
8591  }
8592  else {
8593  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
8594  }
8595 
8596  detail::errHandler(error, __CREATE_BUFFER_ERR);
8597  if (err != NULL) {
8598  *err = error;
8599  }
8600 
8601  if (!useHostPtr) {
8602  error = cl::copy(queue, startIterator, endIterator, *this);
8603  detail::errHandler(error, __CREATE_BUFFER_ERR);
8604  if (err != NULL) {
8605  *err = error;
8606  }
8607  }
8608 }
8609 
8610 inline cl_int enqueueReadBuffer(
8611  const Buffer& buffer,
8612  cl_bool blocking,
8613  size_type offset,
8614  size_type size,
8615  void* ptr,
8616  const vector<Event>* events = NULL,
8617  Event* event = NULL)
8618 {
8619  cl_int error;
8620  CommandQueue queue = CommandQueue::getDefault(&error);
8621 
8622  if (error != CL_SUCCESS) {
8623  return error;
8624  }
8625 
8626  return queue.enqueueReadBuffer(buffer, blocking, offset, size, ptr, events, event);
8627 }
8628 
8629 inline cl_int enqueueWriteBuffer(
8630  const Buffer& buffer,
8631  cl_bool blocking,
8632  size_type offset,
8633  size_type size,
8634  const void* ptr,
8635  const vector<Event>* events = NULL,
8636  Event* event = NULL)
8637 {
8638  cl_int error;
8639  CommandQueue queue = CommandQueue::getDefault(&error);
8640 
8641  if (error != CL_SUCCESS) {
8642  return error;
8643  }
8644 
8645  return queue.enqueueWriteBuffer(buffer, blocking, offset, size, ptr, events, event);
8646 }
8647 
8648 inline void* enqueueMapBuffer(
8649  const Buffer& buffer,
8650  cl_bool blocking,
8651  cl_map_flags flags,
8652  size_type offset,
8653  size_type size,
8654  const vector<Event>* events = NULL,
8655  Event* event = NULL,
8656  cl_int* err = NULL)
8657 {
8658  cl_int error;
8659  CommandQueue queue = CommandQueue::getDefault(&error);
8660  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8661  if (err != NULL) {
8662  *err = error;
8663  }
8664 
8665  void * result = ::clEnqueueMapBuffer(
8666  queue(), buffer(), blocking, flags, offset, size,
8667  (events != NULL) ? (cl_uint) events->size() : 0,
8668  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8669  (cl_event*) event,
8670  &error);
8671 
8672  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8673  if (err != NULL) {
8674  *err = error;
8675  }
8676  return result;
8677 }
8678 
8679 
8680 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8681 
8686 template<typename T>
8687 inline cl_int enqueueMapSVM(
8688  T* ptr,
8689  cl_bool blocking,
8690  cl_map_flags flags,
8691  size_type size,
8692  const vector<Event>* events,
8693  Event* event)
8694 {
8695  cl_int error;
8696  CommandQueue queue = CommandQueue::getDefault(&error);
8697  if (error != CL_SUCCESS) {
8698  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8699  }
8700 
8701  return queue.enqueueMapSVM(
8702  ptr, blocking, flags, size, events, event);
8703 }
8704 
8710 template<typename T, class D>
8711 inline cl_int enqueueMapSVM(
8712  cl::pointer<T, D> ptr,
8713  cl_bool blocking,
8714  cl_map_flags flags,
8715  size_type size,
8716  const vector<Event>* events = NULL,
8717  Event* event = NULL)
8718 {
8719  cl_int error;
8720  CommandQueue queue = CommandQueue::getDefault(&error);
8721  if (error != CL_SUCCESS) {
8722  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8723  }
8724 
8725  return queue.enqueueMapSVM(
8726  ptr, blocking, flags, size, events, event);
8727 }
8728 
8734 template<typename T, class Alloc>
8735 inline cl_int enqueueMapSVM(
8736  cl::vector<T, Alloc> container,
8737  cl_bool blocking,
8738  cl_map_flags flags,
8739  const vector<Event>* events = NULL,
8740  Event* event = NULL)
8741 {
8742  cl_int error;
8743  CommandQueue queue = CommandQueue::getDefault(&error);
8744  if (error != CL_SUCCESS) {
8745  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8746  }
8747 
8748  return queue.enqueueMapSVM(
8749  container, blocking, flags, events, event);
8750 }
8751 
8752 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8753 
8754 inline cl_int enqueueUnmapMemObject(
8755  const Memory& memory,
8756  void* mapped_ptr,
8757  const vector<Event>* events = NULL,
8758  Event* event = NULL)
8759 {
8760  cl_int error;
8761  CommandQueue queue = CommandQueue::getDefault(&error);
8762  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8763  if (error != CL_SUCCESS) {
8764  return error;
8765  }
8766 
8767  cl_event tmp;
8768  cl_int err = detail::errHandler(
8769  ::clEnqueueUnmapMemObject(
8770  queue(), memory(), mapped_ptr,
8771  (events != NULL) ? (cl_uint)events->size() : 0,
8772  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8773  (event != NULL) ? &tmp : NULL),
8774  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8775 
8776  if (event != NULL && err == CL_SUCCESS)
8777  *event = tmp;
8778 
8779  return err;
8780 }
8781 
8782 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8783 
8788 template<typename T>
8789 inline cl_int enqueueUnmapSVM(
8790  T* ptr,
8791  const vector<Event>* events = NULL,
8792  Event* event = NULL)
8793 {
8794  cl_int error;
8795  CommandQueue queue = CommandQueue::getDefault(&error);
8796  if (error != CL_SUCCESS) {
8797  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8798  }
8799 
8800  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
8801  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8802 
8803 }
8804 
8810 template<typename T, class D>
8811 inline cl_int enqueueUnmapSVM(
8812  cl::pointer<T, D> &ptr,
8813  const vector<Event>* events = NULL,
8814  Event* event = NULL)
8815 {
8816  cl_int error;
8817  CommandQueue queue = CommandQueue::getDefault(&error);
8818  if (error != CL_SUCCESS) {
8819  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8820  }
8821 
8822  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
8823  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8824 }
8825 
8831 template<typename T, class Alloc>
8832 inline cl_int enqueueUnmapSVM(
8833  cl::vector<T, Alloc> &container,
8834  const vector<Event>* events = NULL,
8835  Event* event = NULL)
8836 {
8837  cl_int error;
8838  CommandQueue queue = CommandQueue::getDefault(&error);
8839  if (error != CL_SUCCESS) {
8840  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8841  }
8842 
8843  return detail::errHandler(queue.enqueueUnmapSVM(container, events, event),
8844  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8845 }
8846 
8847 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8848 
8849 inline cl_int enqueueCopyBuffer(
8850  const Buffer& src,
8851  const Buffer& dst,
8852  size_type src_offset,
8853  size_type dst_offset,
8854  size_type size,
8855  const vector<Event>* events = NULL,
8856  Event* event = NULL)
8857 {
8858  cl_int error;
8859  CommandQueue queue = CommandQueue::getDefault(&error);
8860 
8861  if (error != CL_SUCCESS) {
8862  return error;
8863  }
8864 
8865  return queue.enqueueCopyBuffer(src, dst, src_offset, dst_offset, size, events, event);
8866 }
8867 
8873 template< typename IteratorType >
8874 inline cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
8875 {
8876  cl_int error;
8877  CommandQueue queue = CommandQueue::getDefault(&error);
8878  if (error != CL_SUCCESS)
8879  return error;
8880 
8881  return cl::copy(queue, startIterator, endIterator, buffer);
8882 }
8883 
8889 template< typename IteratorType >
8890 inline cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
8891 {
8892  cl_int error;
8893  CommandQueue queue = CommandQueue::getDefault(&error);
8894  if (error != CL_SUCCESS)
8895  return error;
8896 
8897  return cl::copy(queue, buffer, startIterator, endIterator);
8898 }
8899 
8905 template< typename IteratorType >
8906 inline cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
8907 {
8908  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8909  cl_int error;
8910 
8911  size_type length = endIterator-startIterator;
8912  size_type byteLength = length*sizeof(DataType);
8913 
8914  DataType *pointer =
8915  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, byteLength, 0, 0, &error));
8916  // if exceptions enabled, enqueueMapBuffer will throw
8917  if( error != CL_SUCCESS ) {
8918  return error;
8919  }
8920 #if defined(_MSC_VER)
8921  std::copy(
8922  startIterator,
8923  endIterator,
8924  stdext::checked_array_iterator<DataType*>(
8925  pointer, length));
8926 #else
8927  std::copy(startIterator, endIterator, pointer);
8928 #endif
8929  Event endEvent;
8930  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
8931  // if exceptions enabled, enqueueUnmapMemObject will throw
8932  if( error != CL_SUCCESS ) {
8933  return error;
8934  }
8935  endEvent.wait();
8936  return CL_SUCCESS;
8937 }
8938 
8944 template< typename IteratorType >
8945 inline cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
8946 {
8947  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8948  cl_int error;
8949 
8950  size_type length = endIterator-startIterator;
8951  size_type byteLength = length*sizeof(DataType);
8952 
8953  DataType *pointer =
8954  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_READ, 0, byteLength, 0, 0, &error));
8955  // if exceptions enabled, enqueueMapBuffer will throw
8956  if( error != CL_SUCCESS ) {
8957  return error;
8958  }
8959  std::copy(pointer, pointer + length, startIterator);
8960  Event endEvent;
8961  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
8962  // if exceptions enabled, enqueueUnmapMemObject will throw
8963  if( error != CL_SUCCESS ) {
8964  return error;
8965  }
8966  endEvent.wait();
8967  return CL_SUCCESS;
8968 }
8969 
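
The iterator-based Buffer constructors and the cl::copy helpers above move data by mapping the buffer, running std::copy, and unmapping it again. A short sketch on the default queue (buffer size and contents are illustrative):

std::vector<int> input(256, 42), output(256);

cl::Buffer buf(CL_MEM_READ_WRITE, sizeof(int) * input.size());
cl::copy(input.begin(), input.end(), buf);    // host -> device via map/copy/unmap
cl::copy(buf, output.begin(), output.end());  // device -> host via map/copy/unmap
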
8970 
8971 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8972 
8975 template<typename T, class Alloc>
8976 inline cl_int mapSVM(cl::vector<T, Alloc> &container)
8977 {
8978  return enqueueMapSVM(container, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
8979 }
8980 
8984 template<typename T, class Alloc>
8985 inline cl_int unmapSVM(cl::vector<T, Alloc> &container)
8986 {
8987  return enqueueUnmapSVM(container);
8988 }
8989 
8990 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8991 
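
For coarse-grained SVM, host access must be bracketed by a map and an unmap; the mapSVM/unmapSVM helpers above issue that blocking map on the default queue, and the CommandQueue members do the same on an explicit queue. A sketch using a byte container allocated with the SVM allocator (the size and the CL_MAP_WRITE choice are illustrative):

// Sketch only: assumes the default context's device supports coarse-grained SVM.
cl::SVMAllocator<unsigned char, cl::SVMTraitCoarse<>> svmAlloc;
std::vector<unsigned char, cl::SVMAllocator<unsigned char, cl::SVMTraitCoarse<>>>
    svmVec(1024, 0, svmAlloc);

cl::CommandQueue q = cl::CommandQueue::getDefault();
q.enqueueMapSVM(svmVec, CL_TRUE, CL_MAP_WRITE);  // blocking map before host writes
for (auto &byte : svmVec) byte = 0x7f;           // host fills the mapped allocation
q.enqueueUnmapSVM(svmVec);                       // hand it back before kernels use it
q.finish();
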
8992 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8993 inline cl_int enqueueReadBufferRect(
8994  const Buffer& buffer,
8995  cl_bool blocking,
8996  const array<size_type, 3>& buffer_offset,
8997  const array<size_type, 3>& host_offset,
8998  const array<size_type, 3>& region,
8999  size_type buffer_row_pitch,
9000  size_type buffer_slice_pitch,
9001  size_type host_row_pitch,
9002  size_type host_slice_pitch,
9003  void *ptr,
9004  const vector<Event>* events = NULL,
9005  Event* event = NULL)
9006 {
9007  cl_int error;
9008  CommandQueue queue = CommandQueue::getDefault(&error);
9009 
9010  if (error != CL_SUCCESS) {
9011  return error;
9012  }
9013 
9014  return queue.enqueueReadBufferRect(
9015  buffer,
9016  blocking,
9017  buffer_offset,
9018  host_offset,
9019  region,
9020  buffer_row_pitch,
9021  buffer_slice_pitch,
9022  host_row_pitch,
9023  host_slice_pitch,
9024  ptr,
9025  events,
9026  event);
9027 }
9028 
9029 inline cl_int enqueueWriteBufferRect(
9030  const Buffer& buffer,
9031  cl_bool blocking,
9032  const array<size_type, 3>& buffer_offset,
9033  const array<size_type, 3>& host_offset,
9034  const array<size_type, 3>& region,
9035  size_type buffer_row_pitch,
9036  size_type buffer_slice_pitch,
9037  size_type host_row_pitch,
9038  size_type host_slice_pitch,
9039  const void *ptr,
9040  const vector<Event>* events = NULL,
9041  Event* event = NULL)
9042 {
9043  cl_int error;
9044  CommandQueue queue = CommandQueue::getDefault(&error);
9045 
9046  if (error != CL_SUCCESS) {
9047  return error;
9048  }
9049 
9050  return queue.enqueueWriteBufferRect(
9051  buffer,
9052  blocking,
9053  buffer_offset,
9054  host_offset,
9055  region,
9056  buffer_row_pitch,
9057  buffer_slice_pitch,
9058  host_row_pitch,
9059  host_slice_pitch,
9060  ptr,
9061  events,
9062  event);
9063 }
9064 
9065 inline cl_int enqueueCopyBufferRect(
9066  const Buffer& src,
9067  const Buffer& dst,
9068  const array<size_type, 3>& src_origin,
9069  const array<size_type, 3>& dst_origin,
9070  const array<size_type, 3>& region,
9071  size_type src_row_pitch,
9072  size_type src_slice_pitch,
9073  size_type dst_row_pitch,
9074  size_type dst_slice_pitch,
9075  const vector<Event>* events = NULL,
9076  Event* event = NULL)
9077 {
9078  cl_int error;
9079  CommandQueue queue = CommandQueue::getDefault(&error);
9080 
9081  if (error != CL_SUCCESS) {
9082  return error;
9083  }
9084 
9085  return queue.enqueueCopyBufferRect(
9086  src,
9087  dst,
9088  src_origin,
9089  dst_origin,
9090  region,
9091  src_row_pitch,
9092  src_slice_pitch,
9093  dst_row_pitch,
9094  dst_slice_pitch,
9095  events,
9096  event);
9097 }
9098 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
9099 
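
The *Rect variants above address a 2D or 3D sub-region: the offsets and region are given as {bytes, rows, slices} together with row and slice pitches. A sketch reading a 16x16 tile of floats out of a 256x256 row-major buffer (the cl::Buffer named buf and all sizes are illustrative):

const size_t W = 256, TILE = 16;
std::vector<float> tile(TILE * TILE);

cl::enqueueReadBufferRect(
    buf, CL_TRUE,
    {32 * sizeof(float), 64, 0},        // buffer_offset: column 32, row 64, slice 0
    {0, 0, 0},                          // host_offset
    {TILE * sizeof(float), TILE, 1},    // region: width in bytes, rows, slices
    W * sizeof(float), 0,               // buffer row pitch; slice pitch 0 = computed
    TILE * sizeof(float), 0,            // host row pitch; slice pitch 0 = computed
    tile.data());
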
9100 inline cl_int enqueueReadImage(
9101  const Image& image,
9102  cl_bool blocking,
9103  const array<size_type, 3>& origin,
9104  const array<size_type, 3>& region,
9105  size_type row_pitch,
9106  size_type slice_pitch,
9107  void* ptr,
9108  const vector<Event>* events = NULL,
9109  Event* event = NULL)
9110 {
9111  cl_int error;
9112  CommandQueue queue = CommandQueue::getDefault(&error);
9113 
9114  if (error != CL_SUCCESS) {
9115  return error;
9116  }
9117 
9118  return queue.enqueueReadImage(
9119  image,
9120  blocking,
9121  origin,
9122  region,
9123  row_pitch,
9124  slice_pitch,
9125  ptr,
9126  events,
9127  event);
9128 }
9129 
9130 inline cl_int enqueueWriteImage(
9131  const Image& image,
9132  cl_bool blocking,
9133  const array<size_type, 3>& origin,
9134  const array<size_type, 3>& region,
9135  size_type row_pitch,
9136  size_type slice_pitch,
9137  const void* ptr,
9138  const vector<Event>* events = NULL,
9139  Event* event = NULL)
9140 {
9141  cl_int error;
9142  CommandQueue queue = CommandQueue::getDefault(&error);
9143 
9144  if (error != CL_SUCCESS) {
9145  return error;
9146  }
9147 
9148  return queue.enqueueWriteImage(
9149  image,
9150  blocking,
9151  origin,
9152  region,
9153  row_pitch,
9154  slice_pitch,
9155  ptr,
9156  events,
9157  event);
9158 }
9159 
9160 inline cl_int enqueueCopyImage(
9161  const Image& src,
9162  const Image& dst,
9163  const array<size_type, 3>& src_origin,
9164  const array<size_type, 3>& dst_origin,
9165  const array<size_type, 3>& region,
9166  const vector<Event>* events = NULL,
9167  Event* event = NULL)
9168 {
9169  cl_int error;
9170  CommandQueue queue = CommandQueue::getDefault(&error);
9171 
9172  if (error != CL_SUCCESS) {
9173  return error;
9174  }
9175 
9176  return queue.enqueueCopyImage(
9177  src,
9178  dst,
9179  src_origin,
9180  dst_origin,
9181  region,
9182  events,
9183  event);
9184 }
9185 
9186 inline cl_int enqueueCopyImageToBuffer(
9187  const Image& src,
9188  const Buffer& dst,
9189  const array<size_type, 3>& src_origin,
9190  const array<size_type, 3>& region,
9191  size_type dst_offset,
9192  const vector<Event>* events = NULL,
9193  Event* event = NULL)
9194 {
9195  cl_int error;
9196  CommandQueue queue = CommandQueue::getDefault(&error);
9197 
9198  if (error != CL_SUCCESS) {
9199  return error;
9200  }
9201 
9202  return queue.enqueueCopyImageToBuffer(
9203  src,
9204  dst,
9205  src_origin,
9206  region,
9207  dst_offset,
9208  events,
9209  event);
9210 }
9211 
9212 inline cl_int enqueueCopyBufferToImage(
9213  const Buffer& src,
9214  const Image& dst,
9215  size_type src_offset,
9216  const array<size_type, 3>& dst_origin,
9217  const array<size_type, 3>& region,
9218  const vector<Event>* events = NULL,
9219  Event* event = NULL)
9220 {
9221  cl_int error;
9222  CommandQueue queue = CommandQueue::getDefault(&error);
9223 
9224  if (error != CL_SUCCESS) {
9225  return error;
9226  }
9227 
9228  return queue.enqueueCopyBufferToImage(
9229  src,
9230  dst,
9231  src_offset,
9232  dst_origin,
9233  region,
9234  events,
9235  event);
9236 }
9237 
9238 
9239 inline cl_int flush(void)
9240 {
9241  cl_int error;
9242  CommandQueue queue = CommandQueue::getDefault(&error);
9243 
9244  if (error != CL_SUCCESS) {
9245  return error;
9246  }
9247 
9248  return queue.flush();
9249 }
9250 
9251 inline cl_int finish(void)
9252 {
9253  cl_int error;
9254  CommandQueue queue = CommandQueue::getDefault(&error);
9255 
9256  if (error != CL_SUCCESS) {
9257  return error;
9258  }
9259 
9260 
9261  return queue.finish();
9262 }
9263 
9264  class EnqueueArgs
9265 {
9266 private:
9267  CommandQueue queue_;
9268  const NDRange offset_;
9269  const NDRange global_;
9270  const NDRange local_;
9271  vector<Event> events_;
9272 
9273  template<typename... Ts>
9274  friend class KernelFunctor;
9275 
9276 public:
9277  EnqueueArgs(NDRange global) :
9278  queue_(CommandQueue::getDefault()),
9279  offset_(NullRange),
9280  global_(global),
9281  local_(NullRange)
9282  {
9283 
9284  }
9285 
9286  EnqueueArgs(NDRange global, NDRange local) :
9287  queue_(CommandQueue::getDefault()),
9288  offset_(NullRange),
9289  global_(global),
9290  local_(local)
9291  {
9292 
9293  }
9294 
9295  EnqueueArgs(NDRange offset, NDRange global, NDRange local) :
9296  queue_(CommandQueue::getDefault()),
9297  offset_(offset),
9298  global_(global),
9299  local_(local)
9300  {
9301 
9302  }
9303 
9304  EnqueueArgs(Event e, NDRange global) :
9305  queue_(CommandQueue::getDefault()),
9306  offset_(NullRange),
9307  global_(global),
9308  local_(NullRange)
9309  {
9310  events_.push_back(e);
9311  }
9312 
9313  EnqueueArgs(Event e, NDRange global, NDRange local) :
9314  queue_(CommandQueue::getDefault()),
9315  offset_(NullRange),
9316  global_(global),
9317  local_(local)
9318  {
9319  events_.push_back(e);
9320  }
9321 
9322  EnqueueArgs(Event e, NDRange offset, NDRange global, NDRange local) :
9323  queue_(CommandQueue::getDefault()),
9324  offset_(offset),
9325  global_(global),
9326  local_(local)
9327  {
9328  events_.push_back(e);
9329  }
9330 
9331  EnqueueArgs(const vector<Event> &events, NDRange global) :
9332  queue_(CommandQueue::getDefault()),
9333  offset_(NullRange),
9334  global_(global),
9335  local_(NullRange),
9336  events_(events)
9337  {
9338 
9339  }
9340 
9341  EnqueueArgs(const vector<Event> &events, NDRange global, NDRange local) :
9342  queue_(CommandQueue::getDefault()),
9343  offset_(NullRange),
9344  global_(global),
9345  local_(local),
9346  events_(events)
9347  {
9348 
9349  }
9350 
9351  EnqueueArgs(const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9352  queue_(CommandQueue::getDefault()),
9353  offset_(offset),
9354  global_(global),
9355  local_(local),
9356  events_(events)
9357  {
9358 
9359  }
9360 
9361  EnqueueArgs(CommandQueue &queue, NDRange global) :
9362  queue_(queue),
9363  offset_(NullRange),
9364  global_(global),
9365  local_(NullRange)
9366  {
9367 
9368  }
9369 
9370  EnqueueArgs(CommandQueue &queue, NDRange global, NDRange local) :
9371  queue_(queue),
9372  offset_(NullRange),
9373  global_(global),
9374  local_(local)
9375  {
9376 
9377  }
9378 
9379  EnqueueArgs(CommandQueue &queue, NDRange offset, NDRange global, NDRange local) :
9380  queue_(queue),
9381  offset_(offset),
9382  global_(global),
9383  local_(local)
9384  {
9385 
9386  }
9387 
9388  EnqueueArgs(CommandQueue &queue, Event e, NDRange global) :
9389  queue_(queue),
9390  offset_(NullRange),
9391  global_(global),
9392  local_(NullRange)
9393  {
9394  events_.push_back(e);
9395  }
9396 
9397  EnqueueArgs(CommandQueue &queue, Event e, NDRange global, NDRange local) :
9398  queue_(queue),
9399  offset_(NullRange),
9400  global_(global),
9401  local_(local)
9402  {
9403  events_.push_back(e);
9404  }
9405 
9406  EnqueueArgs(CommandQueue &queue, Event e, NDRange offset, NDRange global, NDRange local) :
9407  queue_(queue),
9408  offset_(offset),
9409  global_(global),
9410  local_(local)
9411  {
9412  events_.push_back(e);
9413  }
9414 
9415  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global) :
9416  queue_(queue),
9417  offset_(NullRange),
9418  global_(global),
9419  local_(NullRange),
9420  events_(events)
9421  {
9422 
9423  }
9424 
9425  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global, NDRange local) :
9426  queue_(queue),
9427  offset_(NullRange),
9428  global_(global),
9429  local_(local),
9430  events_(events)
9431  {
9432 
9433  }
9434 
9435  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9436  queue_(queue),
9437  offset_(offset),
9438  global_(global),
9439  local_(local),
9440  events_(events)
9441  {
9442 
9443  }
9444 };
9445 
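
EnqueueArgs simply packages the queue, offset, global and local sizes, and wait list that KernelFunctor later hands to enqueueNDRangeKernel. A few representative constructions (all sizes are illustrative):

cl::EnqueueArgs simple(cl::NDRange(1024));                    // default queue, driver-chosen local size
cl::EnqueueArgs blocked(cl::NDRange(1024), cl::NDRange(64));  // explicit work-group size

cl::CommandQueue q = cl::CommandQueue::getDefault();
cl::Event gate;  // e.g. returned by an earlier enqueue (illustrative placeholder)
cl::EnqueueArgs after(q, gate, cl::NDRange(1024), cl::NDRange(64));
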
9446 
9447 //----------------------------------------------------------------------------------------------
9448 
9449 
9454 template<typename... Ts>
9455  class KernelFunctor
9456 {
9457 private:
9458  Kernel kernel_;
9459 
9460  template<int index, typename T0, typename... T1s>
9461  void setArgs(T0&& t0, T1s&&... t1s)
9462  {
9463  kernel_.setArg(index, t0);
9464  setArgs<index + 1, T1s...>(std::forward<T1s>(t1s)...);
9465  }
9466 
9467  template<int index, typename T0>
9468  void setArgs(T0&& t0)
9469  {
9470  kernel_.setArg(index, t0);
9471  }
9472 
9473  template<int index>
9474  void setArgs()
9475  {
9476  }
9477 
9478 
9479 public:
9480  KernelFunctor(Kernel kernel) : kernel_(kernel)
9481  {}
9482 
9483  KernelFunctor(
9484  const Program& program,
9485  const string name,
9486  cl_int * err = NULL) :
9487  kernel_(program, name.c_str(), err)
9488  {}
9489 
9492 
9498  Event operator() (
9499  const EnqueueArgs& args,
9500  Ts... ts)
9501  {
9502  Event event;
9503  setArgs<0>(std::forward<Ts>(ts)...);
9504 
9505  args.queue_.enqueueNDRangeKernel(
9506  kernel_,
9507  args.offset_,
9508  args.global_,
9509  args.local_,
9510  &args.events_,
9511  &event);
9512 
9513  return event;
9514  }
9515 
9522  Event operator() (
9523  const EnqueueArgs& args,
9524  Ts... ts,
9525  cl_int &error)
9526  {
9527  Event event;
9528  setArgs<0>(std::forward<Ts>(ts)...);
9529 
9530  error = args.queue_.enqueueNDRangeKernel(
9531  kernel_,
9532  args.offset_,
9533  args.global_,
9534  args.local_,
9535  &args.events_,
9536  &event);
9537 
9538  return event;
9539  }
9540 
9541 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9542  cl_int setSVMPointers(const vector<void*> &pointerList)
9543  {
9544  return kernel_.setSVMPointers(pointerList);
9545  }
9546 
9547  template<typename T0, typename... T1s>
9548  cl_int setSVMPointers(const T0 &t0, T1s &... ts)
9549  {
9550  return kernel_.setSVMPointers(t0, ts...);
9551  }
9552 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9553 
9554  Kernel getKernel()
9555  {
9556  return kernel_;
9557  }
9558 };
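For context, a hedged usage sketch (not from the header): the KernelFunctor template parameters fix the kernel's argument types, and operator() sets those arguments in order via setArgs<0>() before forwarding the EnqueueArgs fields to enqueueNDRangeKernel. The kernel name "vadd" and its signature are assumptions for illustration.

#define CL_HPP_TARGET_OPENCL_VERSION 200
#include <CL/cl2.hpp>

// Sketch: wrap a hypothetical kernel "vadd(global int*, global int*, int)" from
// an already-built program and launch it over n work-items.
cl::Event run_vadd(const cl::Program &program, cl::CommandQueue &queue,
                   cl::Buffer &a, cl::Buffer &b, int n)
{
    cl::KernelFunctor<cl::Buffer, cl::Buffer, int> vadd(program, "vadd");
    // On an OpenCL 2.0 target, raw SVM allocations the kernel dereferences could
    // additionally be registered first through vadd.setSVMPointers(...).
    cl::EnqueueArgs args(queue, cl::NDRange(static_cast<cl::size_type>(n)));
    return vadd(args, a, b, n);   // returns the completion Event of the launch
}

The second operator() overload works the same way but additionally takes a trailing cl_int& that receives the return code from enqueueNDRangeKernel.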
9559 
9560 namespace compatibility {
9565  template<typename... Ts>
9566  struct make_kernel
9567  {
9568  typedef KernelFunctor<Ts...> FunctorType;
9569 
9570  FunctorType functor_;
9571 
9572  make_kernel(
9573  const Program& program,
9574  const string name,
9575  cl_int * err = NULL) :
9576  functor_(FunctorType(program, name, err))
9577  {}
9578 
9579  make_kernel(
9580  const Kernel kernel) :
9581  functor_(FunctorType(kernel))
9582  {}
9583 
9585  typedef Event result_type;
9586 
9588  typedef Event type_(
9589  const EnqueueArgs&,
9590  Ts...);
9591 
9592  Event operator()(
9593  const EnqueueArgs& enqueueArgs,
9594  Ts... args)
9595  {
9596  return functor_(
9597  enqueueArgs, args...);
9598  }
9599  };
9600 } // namespace compatibility
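As a hedged migration sketch (not part of the header), code written against the old cl.hpp make_kernel can keep compiling by switching to cl::compatibility::make_kernel, whose operator() simply forwards to the wrapped KernelFunctor; the kernel name "scale" and its parameters are assumptions.

#define CL_HPP_TARGET_OPENCL_VERSION 200
#include <CL/cl2.hpp>

// Sketch: legacy-style functor alias; new code should prefer cl::KernelFunctor.
typedef cl::compatibility::make_kernel<cl::Buffer, int> ScaleKernel;

cl::Event launch_scale(const cl::Program &program, cl::CommandQueue &queue,
                       cl::Buffer &data, int count)
{
    ScaleKernel scale(program, "scale");          // builds the wrapped KernelFunctor
    cl::EnqueueArgs args(queue, cl::NDRange(static_cast<cl::size_type>(count)));
    return scale(args, data, count);              // forwards to functor_(args, ...)
}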
9601 
9602 
9603 //----------------------------------------------------------------------------------------------------------------------
9604 
9605 #undef CL_HPP_ERR_STR_
9606 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
9607 #undef __GET_DEVICE_INFO_ERR
9608 #undef __GET_PLATFORM_INFO_ERR
9609 #undef __GET_DEVICE_IDS_ERR
9610 #undef __GET_CONTEXT_INFO_ERR
9611 #undef __GET_EVENT_INFO_ERR
9612 #undef __GET_EVENT_PROFILE_INFO_ERR
9613 #undef __GET_MEM_OBJECT_INFO_ERR
9614 #undef __GET_IMAGE_INFO_ERR
9615 #undef __GET_SAMPLER_INFO_ERR
9616 #undef __GET_KERNEL_INFO_ERR
9617 #undef __GET_KERNEL_ARG_INFO_ERR
9618 #undef __GET_KERNEL_WORK_GROUP_INFO_ERR
9619 #undef __GET_PROGRAM_INFO_ERR
9620 #undef __GET_PROGRAM_BUILD_INFO_ERR
9621 #undef __GET_COMMAND_QUEUE_INFO_ERR
9622 
9623 #undef __CREATE_CONTEXT_ERR
9624 #undef __CREATE_CONTEXT_FROM_TYPE_ERR
9625 #undef __GET_SUPPORTED_IMAGE_FORMATS_ERR
9626 
9627 #undef __CREATE_BUFFER_ERR
9628 #undef __CREATE_SUBBUFFER_ERR
9629 #undef __CREATE_IMAGE2D_ERR
9630 #undef __CREATE_IMAGE3D_ERR
9631 #undef __CREATE_SAMPLER_ERR
9632 #undef __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR
9633 
9634 #undef __CREATE_USER_EVENT_ERR
9635 #undef __SET_USER_EVENT_STATUS_ERR
9636 #undef __SET_EVENT_CALLBACK_ERR
9637 #undef __SET_PRINTF_CALLBACK_ERR
9638 
9639 #undef __WAIT_FOR_EVENTS_ERR
9640 
9641 #undef __CREATE_KERNEL_ERR
9642 #undef __SET_KERNEL_ARGS_ERR
9643 #undef __CREATE_PROGRAM_WITH_SOURCE_ERR
9644 #undef __CREATE_PROGRAM_WITH_BINARY_ERR
9645 #undef __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR
9646 #undef __BUILD_PROGRAM_ERR
9647 #undef __CREATE_KERNELS_IN_PROGRAM_ERR
9648 
9649 #undef __CREATE_COMMAND_QUEUE_ERR
9650 #undef __SET_COMMAND_QUEUE_PROPERTY_ERR
9651 #undef __ENQUEUE_READ_BUFFER_ERR
9652 #undef __ENQUEUE_WRITE_BUFFER_ERR
9653 #undef __ENQUEUE_READ_BUFFER_RECT_ERR
9654 #undef __ENQUEUE_WRITE_BUFFER_RECT_ERR
9655 #undef __ENQEUE_COPY_BUFFER_ERR
9656 #undef __ENQEUE_COPY_BUFFER_RECT_ERR
9657 #undef __ENQUEUE_READ_IMAGE_ERR
9658 #undef __ENQUEUE_WRITE_IMAGE_ERR
9659 #undef __ENQUEUE_COPY_IMAGE_ERR
9660 #undef __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR
9661 #undef __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR
9662 #undef __ENQUEUE_MAP_BUFFER_ERR
9663 #undef __ENQUEUE_MAP_IMAGE_ERR
9664 #undef __ENQUEUE_UNMAP_MEM_OBJECT_ERR
9665 #undef __ENQUEUE_NDRANGE_KERNEL_ERR
9666 #undef __ENQUEUE_TASK_ERR
9667 #undef __ENQUEUE_NATIVE_KERNEL
9668 
9669 #undef __UNLOAD_COMPILER_ERR
9670 #undef __CREATE_SUB_DEVICES_ERR
9671 
9672 #undef __CREATE_PIPE_ERR
9673 #undef __GET_PIPE_INFO_ERR
9674 
9675 #endif //CL_HPP_USER_OVERRIDE_ERROR_STRINGS
9676 
9677 // Extensions
9678 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_
9679 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_
9680 
9681 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
9682 #undef CL_HPP_PARAM_NAME_DEVICE_FISSION_
9683 #endif // CL_HPP_USE_CL_DEVICE_FISSION
9684 
9685 #undef CL_HPP_NOEXCEPT_
9686 #undef CL_HPP_DEFINE_STATIC_MEMBER_
9687 
9688 } // namespace cl
9689 
9690 #endif // CL_HPP_