AI-Ollama-Client

LICENSE


General Provisions

(10)  Any use, modification, and distribution of the Standard or
Modified Versions is governed by this Artistic License. By using,
modifying or distributing the Package, you accept this license. Do not
use, modify, or distribute the Package, if you do not accept this
license.

(11)  If your Modified Version has been derived from a Modified
Version made by someone other than you, you are nevertheless required
to ensure that your Modified Version complies with the requirements of
this license.

(12)  This license does not grant you the right to use any trademark,
service mark, tradename, or logo of the Copyright Holder.

(13)  This license includes the non-exclusive, worldwide,
free-of-charge patent license to make, have made, use, offer to sell,
sell, import and otherwise transfer the Package with respect to any
patent claims licensable by the Copyright Holder that are necessarily

lib/AI/Ollama/Client/Impl.pm


the SHA256 digest of the blob

=back



=cut

sub build_checkBlob_request( $self, %options ) {
    croak "Missing required parameter 'digest'"
        unless exists $options{ 'digest' };

    my $method = 'HEAD';
    my $template = URI::Template->new( '/blobs/{digest}' );
    my $path = $template->process(
              'digest' => delete $options{'digest'},
    );
    my $url = Mojo::URL->new( $self->server . $path );

    my $tx = $self->ua->build_tx(

lib/AI/Ollama/Client/Impl.pm


the SHA256 digest of the blob

=back



=cut

sub build_createBlob_request( $self, %options ) {
    croak "Missing required parameter 'digest'"
        unless exists $options{ 'digest' };

    my $method = 'POST';
    my $template = URI::Template->new( '/blobs/{digest}' );
    my $path = $template->process(
              'digest' => delete $options{'digest'},
    );
    my $url = Mojo::URL->new( $self->server . $path );

    my $body = delete $options{ body } // '';
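
A minimal sketch of driving the two blob request builders shown above. The constructor arguments, the assumption that each builder returns the built Mojo transaction, and the file name and server URL are illustrative inferences from the excerpt rather than documented API:

use strict;
use warnings;
use Digest::SHA 'sha256_hex';
use Mojo::File 'path';
use AI::Ollama::Client::Impl;

# Assumed constructor; the methods above only show that the object
# provides ->server and ->ua.
my $client = AI::Ollama::Client::Impl->new(
    server => 'http://127.0.0.1:11434/api',   # assumed default Ollama endpoint
);

# Blobs are addressed by the SHA-256 digest of their raw bytes,
# prefixed with "sha256:" as in the ollama-curated.yaml example.
my $bytes  = path('mymodel.gguf')->slurp;     # hypothetical file
my $digest = 'sha256:' . sha256_hex($bytes);

# HEAD /blobs/{digest} - check whether the blob already exists.
my $check_tx = $client->build_checkBlob_request( digest => $digest );

# POST /blobs/{digest} - upload the raw bytes if it does not.
my $create_tx = $client->build_createBlob_request(
    digest => $digest,
    body   => $bytes,
);

# The transactions can then be started with the client's user agent,
# e.g. $client->ua->start_p($check_tx).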

lib/AI/Ollama/CopyModelRequest.pm


=head2 C<< destination >>

Name of the new model.

=cut

has 'destination' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< source >>

Name of the model to copy.

=cut

has 'source' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);


1;
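
The two attributes mirror the source/destination example values in the CopyModelRequest schema of ollama-curated.yaml. A minimal sketch of constructing the request object, assuming the usual Moo-style constructor implied by the has declarations:

use AI::Ollama::CopyModelRequest;

# Copy llama2:7b to a new name (values taken from the schema examples).
my $req = AI::Ollama::CopyModelRequest->new(
    source      => 'llama2:7b',
    destination => 'llama2-backup',
);

print $req->source, ' -> ', $req->destination, "\n";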

lib/AI/Ollama/CreateModelRequest.pm


=head2 C<< modelfile >>

The contents of the Modelfile.

=cut

has 'modelfile' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< name >>

The model name.

Model names follow a `model:tag` format. Some examples are `orca-mini:3b-q4_1` and `llama2:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version.

=cut

has 'name' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< stream >>

If `false`, the response will be returned as a single response object; otherwise, the response will be streamed as a series of objects.

=cut

has 'stream' => (
    is       => 'ro',

lib/AI/Ollama/DeleteModelRequest.pm


The model name.

Model names follow a `model:tag` format. Some examples are `orca-mini:3b-q4_1` and `llama2:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version.

=cut

has 'name' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);


1;

lib/AI/Ollama/GenerateChatCompletionRequest.pm


=head2 C<< messages >>

The messages of the chat; this can be used to keep a chat memory.

=cut

has 'messages' => (
    is       => 'ro',
    isa      => ArrayRef[HashRef],
    required => 1,
);
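
# Sketch, not part of the generated class: each element of the messages
# arrayref is a plain hash shaped like AI::Ollama::Message, i.e. a role of
# "system", "user" or "assistant" plus the message content. Assuming the
# usual Moo-style constructor, a two-turn request might look like:
#
#   my $chat = AI::Ollama::GenerateChatCompletionRequest->new(
#       model    => 'llama2:7b',
#       messages => [
#           { role => 'system', content => 'You are a helpful assistant.' },
#           { role => 'user',   content => 'Why is the sky blue?' },
#       ],
#   );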

=head2 C<< model >>

The model name.

Model names follow a `model:tag` format. Some examples are `orca-mini:3b-q4_1` and `llama2:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version.

=cut

has 'model' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< options >>

Additional model parameters listed in the documentation for the Modelfile, such as `temperature`.

=cut

has 'options' => (
    is       => 'ro',

lib/AI/Ollama/GenerateCompletionRequest.pm


The model name.

Model names follow a `model:tag` format. Some examples are `orca-mini:3b-q4_1` and `llama2:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version.

=cut

has 'model' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< options >>

Additional model parameters listed in the documentation for the Modelfile, such as `temperature`.

=cut

has 'options' => (
    is       => 'ro',

lib/AI/Ollama/GenerateCompletionRequest.pm


=head2 C<< prompt >>

The prompt to generate a response.

=cut

has 'prompt' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< raw >>

If `true`, no formatting will be applied to the prompt and no context will be returned.

You may choose to use the `raw` parameter if you are specifying a full templated prompt in your request to the API, and are managing history yourself.

=cut
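
A minimal sketch of a raw-mode completion request, assuming the usual Moo-style constructor; the llama2-style instruction template and the model name are purely illustrative:

use AI::Ollama::GenerateCompletionRequest;

my $req = AI::Ollama::GenerateCompletionRequest->new(
    model  => 'llama2:7b',
    # With raw set, the caller supplies the fully templated prompt and
    # keeps track of conversation history itself.
    raw    => 1,
    prompt => '[INST] Why is the sky blue? [/INST]',
);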

lib/AI/Ollama/GenerateEmbeddingRequest.pm


The model name.

Model names follow a `model:tag` format. Some examples are `orca-mini:3b-q4_1` and `llama2:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version.

=cut

has 'model' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< options >>

Additional model parameters listed in the documentation for the Modelfile, such as `temperature`.

=cut

has 'options' => (
    is       => 'ro',

lib/AI/Ollama/GenerateEmbeddingRequest.pm


=head2 C<< prompt >>

Text to generate embeddings for.

=cut

has 'prompt' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);


1;
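
A minimal sketch of an embedding request, using the example values from the GenerateEmbeddingRequest schema in ollama-curated.yaml and assuming the usual Moo-style constructor:

use AI::Ollama::GenerateEmbeddingRequest;

my $req = AI::Ollama::GenerateEmbeddingRequest->new(
    model  => 'llama2:7b',
    prompt => 'Here is an article about llamas...',
);

# Per the GenerateEmbeddingResponse schema, the server answers with a
# single 'embedding' array of numbers for this prompt.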

lib/AI/Ollama/Message.pm


=head2 C<< content >>

The content of the message

=cut

has 'content' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< images >>

(optional) a list of Base64-encoded images to include in the message (for multimodal models such as llava)

=cut

has 'images' => (
    is       => 'ro',

lib/AI/Ollama/Message.pm


=cut

has 'role' => (
    is       => 'ro',
    isa      => Enum[
        "system",
        "user",
        "assistant",
    ],
    required => 1,
);


1;
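
A minimal sketch of building a multimodal message, assuming the usual Moo-style constructor and that the images attribute accepts an arrayref of Base64 strings as its description suggests; the image file and prompt text are illustrative:

use MIME::Base64 'encode_base64';
use Mojo::File 'path';
use AI::Ollama::Message;

my $msg = AI::Ollama::Message->new(
    role    => 'user',
    content => 'What is in this image?',
    # encode_base64 with an empty line terminator yields a single-line string
    images  => [ encode_base64( path('photo.png')->slurp, '' ) ],
);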

lib/AI/Ollama/ModelInfoRequest.pm


The model name.

Model names follow a `model:tag` format. Some examples are `orca-mini:3b-q4_1` and `llama2:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version.

=cut

has 'name' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);


1;

lib/AI/Ollama/PullModelRequest.pm


The model name.

Model names follow a `model:tag` format. Some examples are `orca-mini:3b-q4_1` and `llama2:70b`. The tag is optional and, if not provided, will default to `latest`. The tag is used to identify a specific version.

=cut

has 'name' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< stream >>

If `false`, the response will be returned as a single response object; otherwise, the response will be streamed as a series of objects.

=cut

has 'stream' => (
    is       => 'ro',

lib/AI/Ollama/PushModelRequest.pm


=head2 C<< name >>

The name of the model to push in the form of <namespace>/<model>:<tag>.

=cut

has 'name' => (
    is       => 'ro',
    isa      => Str,
    required => 1,
);

=head2 C<< stream >>

If `false`, the response will be returned as a single response object; otherwise, the response will be streamed as a series of objects.

=cut

has 'stream' => (
    is       => 'ro',

ollama/ollama-curated.json

{"openapi":"3.0.3","components":{"schemas":{"PushModelResponse":{"properties":{"total":{"type":"integer","description":"total size of the model","example":"2142590208"},"status":{"$ref":"#/components/schemas/PushModelStatus"},"digest":{"example":"sha...

ollama/ollama-curated.yaml

    head:
      operationId: checkBlob
      tags:
        - Models
      summary: Check to see if a blob exists on the Ollama server, which is useful when creating models.
      parameters:
        - in: path
          name: digest
          schema:
            type: string
          required: true
          description: the SHA256 digest of the blob
          example: sha256:c8edda1f17edd2f1b60253b773d837bda7b9d249a61245931a4d7c9a8d350250
      responses:
        '200':
          description: Blob exists on the server
        '404':
          description: Blob was not found
    post:
      operationId: createBlob
      tags:
        - Models
      summary: Create a blob from a file. Returns the server file path.
      parameters:
        - in: path
          name: digest
          schema:
            type: string
          required: true
          description: the SHA256 digest of the blob
          example: sha256:c8edda1f17edd2f1b60253b773d837bda7b9d249a61245931a4d7c9a8d350250
      requestBody:
        content:
          application/octet-stream:
            schema:
              type: string
              format: binary
      responses:
        '201':

ollama/ollama-curated.yaml

          default: false
        keep_alive:
          type: integer
          description: &keep_alive |
            How long (in minutes) to keep the model loaded in memory.

            - If set to a positive duration (e.g. 20), the model will stay loaded for the provided duration.
            - If set to a negative duration (e.g. -1), the model will stay loaded indefinitely.
            - If set to 0, the model will be unloaded immediately once finished.
            - If not set, the model will stay loaded for 5 minutes by default.
      required:
        - model
        - prompt
    RequestOptions:
      type: object
      description: Additional model parameters listed in the documentation for the Modelfile, such as `temperature`.
      properties:
        num_keep:
          type: integer
          description: |
            Number of tokens to keep from the prompt.

ollama/ollama-curated.yaml

          $ref: '#/components/schemas/ResponseFormat'
        options:
          $ref: '#/components/schemas/RequestOptions'
        stream:
          type: boolean
          description: *stream
          default: false
        keep_alive:
          type: integer
          description: *keep_alive
      required:
        - model
        - messages
    GenerateChatCompletionResponse:
      type: object
      description: The response class for the chat endpoint.
      properties:
        message:
          $ref: '#/components/schemas/Message'
        model:
          type: string

ollama/ollama-curated.yaml

          type: string
          description: The content of the message
          example: Why is the sky blue?
        images:
          type: array
          description: (optional) a list of Base64-encoded images to include in the message (for multimodal models such as llava)
          items:
            type: string
            description: Base64-encoded image (for multimodal models such as llava)
            example: iVBORw0KGgoAAAANSUhEUgAAAAkAAAANCAIAAAD0YtNRAAAABnRSTlMA/AD+APzoM1ogAAAAWklEQVR4AWP48+8PLkR7uUdzcMvtU8EhdykHKAciEXL3pvw5FQIURaBDJkARoDhY3zEXiCgCHbNBmAlUiyaBkENoxZSDWnOtBmoAQu7TnT+3WuDOA7KBIkAGAGwiNeqjusp/AAAAAElFTkSuQmCC
      required:
        - role
        - content
    GenerateEmbeddingRequest:
      description: Generate embeddings from a model.
      type: object
      properties:
        model:
          type: string
          description: *model_name
          example: llama2:7b
        prompt:
          type: string
          description: Text to generate embeddings for.
          example: 'Here is an article about llamas...'
        options:
          $ref: '#/components/schemas/RequestOptions'
      required:
        - model
        - prompt
    GenerateEmbeddingResponse:
      type: object
      description: Returns the embedding information.
      properties:
        embedding:
          type: array
          description: The embedding for the prompt.
          items:

ollama/ollama-curated.yaml

          description: *model_name
          example: mario
        modelfile:
          type: string
          description: The contents of the Modelfile.
          example: FROM llama2\nSYSTEM You are mario from Super Mario Bros.
        stream:
          type: boolean
          description: *stream
          default: false
      required:
        - name
        - modelfile
    CreateModelResponse:
      description: Response object for creating a model. When finished, `status` is `success`.
      type: object
      properties:
        status:
          $ref: '#/components/schemas/CreateModelStatus'
    CreateModelStatus:
      type: string

ollama/ollama-curated.yaml

          description: Size of the model on disk.
          example: 7323310500
    ModelInfoRequest:
      description: Request class for the show model info endpoint.
      type: object
      properties:
        name:
          type: string
          description: *model_name
          example: llama2:7b
      required:
        - name
    ModelInfo:
      description: Details about a model including modelfile, template, parameters, license, and system prompt.
      type: object
      properties:
        license:
          type: string
          description: The model's license.
          example: <contents of license block>
        modelfile:

ollama/ollama-curated.yaml

      type: object
      properties:
        source:
          type: string
          description: Name of the model to copy.
          example: llama2:7b
        destination:
          type: string
          description: Name of the new model.
          example: llama2-backup
      required:
        - source
        - destination
    DeleteModelRequest:
      description: Request class for deleting a model.
      type: object
      properties:
        name:
          type: string
          description: *model_name
          example: llama2:13b
      required:
        - name
    PullModelRequest:
      description: Request class for pulling a model.
      type: object
      properties:
        name:
          type: string
          description: *model_name
          example: llama2:7b
        insecure:
          type: boolean
          description: |
            Allow insecure connections to the library.

            Only use this if you are pulling from your own library during development.
          default: false
        stream:
          type: boolean
          description: *stream
          default: false
      required:
        - name
    PullModelResponse:
      description: |
        Response class for pulling a model.

        The first object is the manifest. Then there is a series of downloading responses. Until any of the downloads is completed, the `completed` key may not be included.

        The number of files to be downloaded depends on the number of layers specified in the manifest.
      type: object
      properties:

ollama/ollama-curated.yaml

          type: boolean
          description: |
            Allow insecure connections to the library.

            Only use this if you are pushing to your library during development.
          default: false
        stream:
          type: boolean
          description: *stream
          default: false
      required:
        - name
    PushModelResponse:
      type: object
      description: Response class for pushing a model.
      properties:
        status:
          $ref: '#/components/schemas/PushModelStatus'
        digest:
          type: string
          description: the model's digest

openapi/petstore-expanded.yaml

      description: |
        Returns all pets from the system that the user has access to
        Nam sed condimentum est. Maecenas tempor sagittis sapien, nec rhoncus sem sagittis sit amet. Aenean at gravida augue, ac iaculis sem. Curabitur odio lorem, ornare eget elementum nec, cursus id lectus. Duis mi turpis, pulvinar ac eros ac, tinc...

        Sed tempus felis lobortis leo pulvinar rutrum. Nam mattis velit nisl, eu condimentum ligula luctus nec. Phasellus semper velit eget aliquet faucibus. In a mattis elit. Phasellus vel urna viverra, condimentum lorem id, rhoncus nibh. Ut pellent...
      operationId: findPets
      parameters:
        - name: tags
          in: query
          description: tags to filter by
          required: false
          style: form
          schema:
            type: array
            items:
              type: string
        - name: limit
          in: query
          description: maximum number of results to return
          required: false
          schema:
            type: integer
            format: int32
      responses:
        '200':
          description: pet response
          content:
            application/json:
              schema:
                type: array

openapi/petstore-expanded.yaml

          description: unexpected error
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/Error'
    post:
      description: Creates a new pet in the store. Duplicates are allowed
      operationId: addPet
      requestBody:
        description: Pet to add to the store
        required: true
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/NewPet'
      responses:
        '200':
          description: pet response
          content:
            application/json:
              schema:

openapi/petstore-expanded.yaml

              schema:
                $ref: '#/components/schemas/Error'
  /pets/{id}:
    get:
      description: Returns a user based on a single ID, if the user does not have access to the pet
      operationId: find pet by id
      parameters:
        - name: id
          in: path
          description: ID of pet to fetch
          required: true
          schema:
            type: integer
            format: int64
      responses:
        '200':
          description: pet response
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/Pet'

openapi/petstore-expanded.yaml

            application/json:
              schema:
                $ref: '#/components/schemas/Error'
    delete:
      description: deletes a single pet based on the ID supplied
      operationId: deletePet
      parameters:
        - name: id
          in: path
          description: ID of pet to delete
          required: true
          schema:
            type: integer
            format: int64
      responses:
        '204':
          description: pet deleted
        default:
          description: unexpected error
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/Error'
components:
  schemas:
    Pet:
      allOf:
        - $ref: '#/components/schemas/NewPet'
        - type: object
          required:
          - id
          properties:
            id:
              type: integer
              format: int64

    NewPet:
      type: object
      required:
        - name  
      properties:
        name:
          type: string
        tag:
          type: string    

    Error:
      type: object
      required:
        - code
        - message
      properties:
        code:
          type: integer
          format: int32
        message:
          type: string

xt/99-minimumversion.t


eval {
  #require Test::MinimumVersion::Fast;
  require Test::MinimumVersion;
  Test::MinimumVersion->import;
};

my @files;

if ($@) {
  plan skip_all => "Test::MinimumVersion required for testing minimum Perl version";
}
else {
  all_minimum_version_from_metajson_ok();
}

xt/99-pod.t

  Test::Pod->import;
};

require './Makefile.PL';
# Loaded from Makefile.PL
our %module = get_module_info();

my @files;

if ($@) {
  plan skip_all => "Test::Pod required for testing POD";
}
elsif ($Test::Pod::VERSION < 0.95) {
  plan skip_all => "Test::Pod 0.95 required for testing POD";
}
else {
  my $blib = File::Spec->catfile(qw(blib lib));
  find(\&wanted, grep { -d } ($blib));

  if( my $exe = $module{EXE_FILES}) {
    push @files, @$exe;
  };

  plan tests => scalar @files;

xt/meta-lint.t

use Test::More;

eval {
  #require Test::MinimumVersion::Fast;
  require Parse::CPAN::Meta;
  Parse::CPAN::Meta->import();
  require CPAN::Meta::Validator;
  CPAN::Meta::Validator->VERSION(2.15);
};
if ($@) {
  plan skip_all => "CPAN::Meta::Validator version 2.15 required for testing META files";
}
else {
  plan tests => 4;
}

use lib '.';
our %module;
require 'Makefile.PL';
# Loaded from Makefile.PL
%module = get_module_info();


