AI-TensorFlow-Libtensorflow

 view release on metacpan or  search on metacpan

lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod  view on Meta::CPAN

  /* From <tensorflow/c/experimental/saved_model/public/signature_def_function_metadata.h> */
  TF_CAPI_EXPORT extern const TF_SignatureDefParamList*
  TF_SignatureDefFunctionMetadataReturns(
      const TF_SignatureDefFunctionMetadata* list);

=head2 TF_EnableXLACompilation

=over 2

  When `enable` is true, set
  tensorflow.ConfigProto.OptimizerOptions.global_jit_level to ON_1, and also
  set XLA flag values to prepare for XLA compilation. Otherwise set
  global_jit_level to OFF.
  
  This and the next API are syntax sugar over TF_SetConfig(), and are used by
  clients that cannot read/write the tensorflow.ConfigProto proto.
  TODO: Migrate to TF_CreateConfig() below.

=back

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT extern void TF_EnableXLACompilation(TF_SessionOptions* options,
                                                     unsigned char enable);

=head2 TF_SetXlaEnableLazyCompilation

=over 2

  Set XLA's internal BuildXlaOpsPassFlags.tf_xla_enable_lazy_compilation to the
  value of 'enable'. Also returns the original value of that flag.
  
  Use in tests to allow XLA to fall back to TF classic. This has global effect.

=back

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT unsigned char TF_SetXlaEnableLazyCompilation(
      unsigned char enable);

=head2 TF_SetTfXlaCpuGlobalJit

=over 2

lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod  view on Meta::CPAN

=back

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT unsigned char TF_SetTfXlaCpuGlobalJit(unsigned char enable);

=head2 TF_SetXlaAutoJitMode

=over 2

  Sets XLA's auto jit mode according to the specified string, which is parsed
  as if passed in XLA_FLAGS. This has global effect.

=back

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT void TF_SetXlaAutoJitMode(const char* mode);

=head2 TF_GetXlaAutoJitEnabled

=over 2

lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod  view on Meta::CPAN


=back

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT unsigned char TF_GetXlaAutoJitEnabled();

=head2 TF_SetXlaMinClusterSize

=over 2

  Sets XLA's minimum cluster size. This has global effect.

=back

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT void TF_SetXlaMinClusterSize(int size);

=head2 TF_GetXlaConstantFoldingDisabled

=over 2

lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod  view on Meta::CPAN

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT void TF_SetXlaConstantFoldingDisabled(
      unsigned char should_enable);

=head2 TF_CreateConfig

=over 2

  Create a serialized tensorflow.ConfigProto proto, where:
  
  a) ConfigProto.optimizer_options.global_jit_level is set to ON_1 if
  `enable_xla_compilation` is non-zero, and OFF otherwise.
  b) ConfigProto.gpu_options.allow_growth is set to `gpu_memory_allow_growth`.
  c) ConfigProto.device_count is set to `num_cpu_devices`.

=back

  /* From <tensorflow/c/c_api_experimental.h> */
  TF_CAPI_EXPORT extern TF_Buffer* TF_CreateConfig(
      unsigned char enable_xla_compilation, unsigned char gpu_memory_allow_growth,
      unsigned int num_cpu_devices);



( run in 0.607 second using v1.01-cache-2.11-cpan-49f99fa48dc )