AI-Ollama-Client

ollama/ollama-curated.yaml

openapi: 3.1.0
# https://github.com/davidmigloz/langchain_dart/blob/main/packages/ollama_dart/oas/ollama-curated.yaml

info:
  title: Ollama API
  description: API Spec for Ollama API. Please see https://github.com/jmorganca/ollama/blob/main/docs/api.md for more details.
  version: 0.1.9

#servers:
#  - url: http://localhost:11434/api
#    description: Ollama server URL

tags:
  - name: Completions
    description: Given a prompt, the model will generate a completion.
  - name: Chat
    description: Given a list of messages comprising a conversation, the model will return a response.
  - name: Embeddings
    description: Get a vector representation of a given input.
  - name: Models
    description: List and describe the various models available.

paths:
  /generate:
    post:
      operationId: generateCompletion
      tags:
        - Completions
      summary: Generate a response for a given prompt with a provided model.
      description: The final response object will include statistics and additional data from the request.
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/GenerateCompletionRequest'
      responses:
        '200':
          description: Successful operation.
          content:
            application/x-ndjson:
              schema:
                $ref: '#/components/schemas/GenerateCompletionResponse'
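  # Illustrative sketch (not part of the upstream curated spec): a minimal
  # generateCompletion request and one streamed NDJSON response line, assuming
  # a locally pulled "llama2" model:
  #   POST /generate
  #   {"model": "llama2", "prompt": "Why is the sky blue?"}
  #   -> {"model": "llama2", "response": "The", "done": false}
  #   -> ... (the final line carries "done": true plus timing and token statistics)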
  /chat:
    post:
      operationId: generateChatCompletion
      tags:
        - Chat
      summary: Generate the next message in a chat with a provided model.
      description: This is a streaming endpoint, so there will be a series of responses. The final response object will include statistics and additional data from the request.
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/GenerateChatCompletionRequest'
      responses:
        '200':
          description: Successful operation.
          content:
            application/x-ndjson:
              schema:
                $ref: '#/components/schemas/GenerateChatCompletionResponse'
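  # Illustrative sketch (not part of the upstream curated spec): a minimal
  # generateChatCompletion request and its streamed NDJSON response lines,
  # again assuming a "llama2" model:
  #   POST /chat
  #   {"model": "llama2", "messages": [{"role": "user", "content": "Hello"}]}
  #   -> {"model": "llama2", "message": {"role": "assistant", "content": "Hi"}, "done": false}
  #   -> ... (the final line carries "done": true plus timing and token statistics)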
  /embeddings:
    post:
      operationId: generateEmbedding
      tags:
        - Embeddings
      summary: Generate embeddings from a model.
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/GenerateEmbeddingRequest'
      responses:
        '200':
          description: Successful operation.
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/GenerateEmbeddingResponse'
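  # Illustrative sketch (not part of the upstream curated spec): a minimal
  # generateEmbedding request and its single JSON response, assuming a
  # "llama2" model; the vector values shown are placeholders:
  #   POST /embeddings
  #   {"model": "llama2", "prompt": "Here is an article about llamas"}
  #   -> {"embedding": [0.567, -0.123, ...]}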
  /create:
    post:
      operationId: createModel
      tags:
        - Models
      summary: Create a model from a Modelfile.
      description: It is recommended to set `modelfile` to the content of the Modelfile rather than just setting `path`. This is a requirement for remote create. Remote model creation should also create any file blobs, fields such as `FROM` and `ADAPTER`...
      requestBody:
        description: Create a new model from a Modelfile.
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/CreateModelRequest'
      responses:
        '200':
          description: Successful operation.
          content:
            application/x-ndjson:
              schema:
                $ref: '#/components/schemas/CreateModelResponse'
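  # Illustrative sketch (not part of the upstream curated spec): a createModel
  # request that inlines the Modelfile content, followed by streamed NDJSON
  # status lines; the model name "mario" is a made-up example:
  #   POST /create
  #   {"name": "mario", "modelfile": "FROM llama2\nSYSTEM You are Mario."}
  #   -> {"status": "reading model metadata"}
  #   -> ... -> {"status": "success"}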
  /tags:
    get:
      operationId: listModels
      tags:
        - Models
      summary: List models that are available locally.
      responses:
        '200':
          description: Successful operation.
          content:
