AI-Ollama-Client

 view release on metacpan or  search on metacpan

lib/AI/Ollama/GenerateChatCompletionResponse.pm  view on Meta::CPAN

package AI::Ollama::GenerateChatCompletionResponse 0.05;
# DO NOT EDIT! This is an autogenerated file.

use 5.020;
use Moo 2;
use experimental 'signatures';
use stable 'postderef';
use Types::Standard qw(Enum Str Bool Num Int HashRef ArrayRef);
use MooX::TypeTiny;

use namespace::clean;

=encoding utf8

=head1 NAME

AI::Ollama::GenerateChatCompletionResponse - response object returned when generating a chat completion

=head1 SYNOPSIS

  my $obj = AI::Ollama::GenerateChatCompletionResponse->new();
  ...

=cut

# Return the object's attributes as a plain (unblessed) hash reference.
# Makes a shallow copy, so mutating the returned hashref does not
# affect the object itself.
sub as_hash( $self ) {
    my %fields = $self->%*;
    return \%fields;
}

=head1 PROPERTIES

=head2 C<< created_at >>

Date on which a model was created.

=cut

# Read-only string attribute; see the POD above for its meaning.
has created_at => (
    is  => 'ro',
    isa => Str,
);

=head2 C<< done >>

Whether the response has completed.

=cut

# Read-only flag indicating whether the response has completed.
# NOTE(review): unlike the sibling attributes, this one declares no
# C<isa> constraint. Adding C<Bool> here might reject blessed boolean
# objects (e.g. JSON::PP::Boolean) coming from a JSON decoder — confirm
# against the generator/decoder before tightening. Autogenerated file;
# any change belongs in the generator, not here.
has 'done' => (
    is       => 'ro',
);

=head2 C<< eval_count >>

Number of tokens in the response.

=cut

# Read-only integer attribute; see the POD above for its meaning.
has eval_count => (
    is  => 'ro',
    isa => Int,
);

=head2 C<< eval_duration >>

Time in nanoseconds spent generating the response.

=cut

# Read-only integer attribute (nanoseconds); see the POD above.
has eval_duration => (
    is  => 'ro',
    isa => Int,
);

=head2 C<< load_duration >>

Time in nanoseconds spent loading the model.

=cut

# Read-only integer attribute (nanoseconds); see the POD above.
has load_duration => (
    is  => 'ro',
    isa => Int,
);

=head2 C<< message >>



( run in 1.011 second using v1.01-cache-2.11-cpan-39bf76dae61 )