AI-TensorFlow-Libtensorflow
    
    
  
  
  
View release on MetaCPAN or search on MetaCPAN
         "eg",
         "examples",
         "inc",
         "share",
         "t",
         "xt",
         "maint"
      ]
   },
   "prereqs" : {
      "configure" : {
         "requires" : {
            "ExtUtils::MakeMaker" : "0",
            "perl" : "5.014"
         }
      },
      "develop" : {
         "requires" : {
            "Moose" : "0",
            "Moose::Role" : "0",
            "Pod::Simple::Search" : "0",
    
  
  
  build_requires:
  Data::Dumper: '0'
  PDL: '0'
  PDL::Core: '0'
  Path::Tiny: '0'
  Test2::V0: '0'
  Test::More: '0'
  aliased: '0'
  lib: '0'
  perl: '5.014'
configure_requires:
  ExtUtils::MakeMaker: '0'
  perl: '5.014'
dynamic_config: 0
generated_by: 'Dist::Zilla version 6.030, CPAN::Meta::Converter version 2.150010'
license: apache
meta-spec:
  url: http://module-build.sourceforge.net/META-spec-v1.4.html
  version: '1.4'
name: AI-TensorFlow-Libtensorflow
no_index:
    
  
  
  lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
=back
  /* From <tensorflow/c/c_api.h> */
  TF_CAPI_EXPORT extern void TF_UpdateEdge(TF_Graph* graph, TF_Output new_src,
                                           TF_Input dst, TF_Status* status);
=head2 TF_NewServer
=over 2
  Creates a new in-process TensorFlow server configured using a serialized
  ServerDef protocol buffer provided via `proto` and `proto_len`.
  
  The server will not serve any requests until TF_ServerStart is invoked.
  The server will stop serving requests once TF_ServerStop or
  TF_DeleteServer is invoked.
=back
  /* From <tensorflow/c/c_api.h> */
  TF_CAPI_EXPORT extern TF_Server* TF_NewServer(const void* proto,
    
  
  
  lib/AI/TensorFlow/Libtensorflow/Manual/CAPI.pod view on Meta::CPAN
  /* From <tensorflow/c/eager/c_api_experimental.h> */
  TF_CAPI_EXPORT extern void TFE_GetExecutedOpNames(TFE_Context* ctx,
                                                    TF_Buffer* buf,
                                                    TF_Status* status);
=head2 TFE_SetLogicalCpuDevices
=over 2
  Set logical devices to the context's device manager.
  If logical devices are already configured at context initialization
  through TFE_ContextOptions, this method should not be called.
=back
  /* From <tensorflow/c/eager/c_api_experimental.h> */
  TF_CAPI_EXPORT extern void TFE_SetLogicalCpuDevices(TFE_Context* ctx,
                                                      int num_cpus,
                                                      const char* prefix,
                                                      TF_Status* status);
    
  
  
  
( run in 0.248 second using v1.01-cache-2.11-cpan-a1d94b6210f )