AI-Anthropic
view release on metacpan or search on metacpan
},
"name" : "AI-Anthropic",
"no_index" : {
"directory" : [
"t",
"inc"
]
},
"prereqs" : {
"build" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"configure" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"runtime" : {
"requires" : {
"Carp" : "0",
"HTTP::Tiny" : "0.070",
"JSON::PP" : "2.0",
"MIME::Base64" : "0",
"perl" : "5.010"
}
},
"test" : {
"requires" : {
"Test::More" : "0.88"
}
}
},
"release_status" : "stable",
"resources" : {
"bugtracker" : {
"web" : "https://github.com/yourusername/AI-Anthropic/issues"
},
"repository" : {
"type" : "git",
"url" : "https://github.com/yourusername/AI-Anthropic.git",
"web" : "https://github.com/yourusername/AI-Anthropic"
}
},
"version" : "0.01",
---
abstract: 'Perl interface to Anthropic Claude API'
author:
- 'Your Name <your@email.com>'
build_requires:
ExtUtils::MakeMaker: '0'
Test::More: '0.88'
configure_requires:
ExtUtils::MakeMaker: '0'
dynamic_config: 1
generated_by: 'ExtUtils::MakeMaker version 7.76, CPAN::Meta::Converter version 2.150010'
keywords:
- anthropic
- claude
- ai
- llm
- api
- chatbot
- language-model
license: perl
meta-spec:
url: http://module-build.sourceforge.net/META-spec-v1.4.html
version: '1.4'
name: AI-Anthropic
no_index:
directory:
- t
- inc
requires:
Carp: '0'
HTTP::Tiny: '0.070'
JSON::PP: '2.0'
MIME::Base64: '0'
perl: '5.010'
resources:
bugtracker: https://github.com/yourusername/AI-Anthropic/issues
repository: https://github.com/yourusername/AI-Anthropic.git
version: '0.01'
x_serialization_backend: 'CPAN::Meta::YAML version 0.020'
Makefile.PL view on Meta::CPAN
'MIME::Base64' => '0',
'Carp' => '0',
},
TEST_REQUIRES => {
'Test::More' => '0.88',
},
META_MERGE => {
'meta-spec' => { version => 2 },
resources => {
repository => {
type => 'git',
url => 'https://github.com/yourusername/AI-Anthropic.git',
web => 'https://github.com/yourusername/AI-Anthropic',
},
bugtracker => {
web => 'https://github.com/yourusername/AI-Anthropic/issues',
},
},
keywords => [
use AI::Anthropic;
my $claude = AI::Anthropic->new(
api_key => 'sk-ant-api03-your-key-here',
);
# Simple message
print $claude->message("What is the meaning of life?");
# With system prompt
my $response = $claude->chat(
system => 'You are a helpful Perl programmer.',
messages => [
{ role => 'user', content => 'How do I read a file?' },
],
);
print "Response: ", $response->text, "\n";
print "Tokens: ", $response->total_tokens, "\n";
```
## Installation
From CPAN:
```bash
cpanm AI::Anthropic
```
Or manually:
```bash
perl Makefile.PL
make
make test
make install
```
## Features
- **Messages API** - Full support for Claude chat completions
- **Streaming** - Real-time response streaming with callbacks
- **Vision** - Send images (from files, URLs, or base64)
- **Tool Use** - Function calling support
- **All Models** - Claude 4 Opus, Sonnet, Haiku and older models
## Quick Start
```perl
use AI::Anthropic;
my $claude = AI::Anthropic->new(
print $chunk;
STDOUT->flush;
},
);
```
## Vision (Images)
```perl
# From file
my $response = $claude->chat(
messages => [
{
role => 'user',
content => [
{ type => 'text', text => 'What is in this image?' },
{ type => 'image', path => '/path/to/image.jpg' },
],
},
],
);
# From URL
my $response = $claude->chat(
messages => [
{
role => 'user',
content => [
{ type => 'text', text => 'Describe this image' },
{ type => 'image', url => 'https://example.com/image.png' },
],
},
],
);
```
## Tool Use (Function Calling)
```perl
my $response = $claude->chat(
messages => [
{ role => 'user', content => 'What is the weather in Baku?' },
],
tools => [
{
name => 'get_weather',
description => 'Get current weather for a location',
            input_schema => {
                type       => 'object',
                properties => {
                    location => { type => 'string' },
                },
                required => ['location'],
            },
        },
    ],
);
```
## Response Object
```perl
my $response = $claude->message("Hello");
$response->text; # Response text
$response->model; # Model used
$response->stop_reason; # Why generation stopped
$response->input_tokens; # Tokens in prompt
$response->output_tokens; # Tokens in response
$response->total_tokens; # Total tokens
$response->raw_response; # Full API response hashref
# Stringifies to text
print "$response";
```
## Configuration
```perl
my $claude = AI::Anthropic->new(
api_key => 'sk-ant-...', # or use ANTHROPIC_API_KEY env
model => 'claude-opus-4-20250514', # default: claude-sonnet-4-20250514
max_tokens => 8192, # default: 4096
timeout => 300, # default: 120 seconds
- **Full featured** - Streaming, vision, tools - all supported
- **Well documented** - POD and examples included
## See Also
- [Anthropic API Documentation](https://docs.anthropic.com/)
- [OpenAI::API](https://metacpan.org/pod/OpenAI::API) - Similar module for OpenAI
## Contributing
Pull requests welcome! Please include tests for new features.
## License
This is free software; you can redistribute it and/or modify it under the same terms as the Perl 5 programming language system itself.
## Author
Your Name <your@email.com>
examples/basic.pl view on Meta::CPAN
);
# Demo of the AI::Anthropic client built above ($claude): simple
# messages, system prompts, multi-turn history, and streaming.
# NOTE(review): requires a valid API key — presumably from
# ANTHROPIC_API_KEY or the constructor; confirm against the elided
# header of this script.
say "=" x 50;
say "AI::Anthropic Example";
say "=" x 50;
# Example 1: Simple message
say "\n1. Simple message:";
say "-" x 30;
my $response = $claude->message("What is Perl? Answer in 2 sentences.");
say "Response: $response";
say "Tokens used: " . $response->total_tokens;
# Example 2: Chat with system prompt
say "\n2. Chat with system prompt:";
say "-" x 30;
$response = $claude->chat(
    system => 'You are a grumpy Perl programmer who loves one-liners.',
    messages => [
        { role => 'user', content => 'How do I reverse a string?' },
    ],
);
say "Response: $response";
# Example 3: Multi-turn conversation
say "\n3. Multi-turn conversation:";
say "-" x 30;
$response = $claude->chat(
    messages => [
        { role => 'user', content => 'My name is Vugar.' },
        { role => 'assistant', content => 'Nice to meet you, Vugar!' },
        { role => 'user', content => 'What is my name?' },
    ],
);
say "Response: $response";
# Example 4: Streaming (if you want to see output in real-time)
say "\n4. Streaming:";
say "-" x 30;
say "Streamed response: ";
# The stream coderef is invoked once per text delta as it arrives.
$claude->chat(
    messages => [
        { role => 'user', content => 'Count from 1 to 5, one number per line.' },
    ],
    stream => sub {
        my ($chunk) = @_;
        print $chunk;
    },
);
lib/AI/Anthropic.pm view on Meta::CPAN
=head1 SYNOPSIS
use AI::Anthropic;
my $claude = AI::Anthropic->new(
api_key => 'sk-ant-api03-your-key-here',
);
# Simple message
my $response = $claude->message("What is the capital of France?");
print $response; # prints response text
# Chat with history
my $response = $claude->chat(
messages => [
{ role => 'user', content => 'Hello!' },
{ role => 'assistant', content => 'Hello! How can I help you today?' },
{ role => 'user', content => 'What is 2+2?' },
],
);
# With system prompt
my $response = $claude->chat(
system => 'You are a helpful Perl programmer.',
messages => [
{ role => 'user', content => 'How do I read a file?' },
],
);
# Streaming
$claude->chat(
messages => [ { role => 'user', content => 'Tell me a story' } ],
stream => sub {
lib/AI/Anthropic.pm view on Meta::CPAN
_json => JSON::PP->new->utf8->allow_nonref,
};
return bless $self, $class;
}
=head2 message
Simple interface for single message:
my $response = $claude->message("Your question here");
my $response = $claude->message("Your question", system => "You are helpful");
print $response->text;
=cut
# Convenience wrapper around chat(): send a single user message.
# Any additional options (system, model, max_tokens, ...) are passed
# straight through to chat().
sub message {
    my ($self, $content, %opts) = @_;
    croak "Message content required" unless defined $content;
    my @conversation = ( { role => 'user', content => $content } );
    return $self->chat( messages => \@conversation, %opts );
}
=head2 chat
Full chat interface:
my $response = $claude->chat(
messages => \@messages, # required
system => $system_prompt, # optional
model => $model, # optional, overrides default
max_tokens => $max_tokens, # optional
temperature => 0.7, # optional, 0.0-1.0
stream => \&callback, # optional, for streaming
tools => \@tools, # optional, for function calling
);
=cut
lib/AI/Anthropic.pm view on Meta::CPAN
source => {
type => 'url',
url => $url,
},
};
}
# POST a JSON-encoded request body to the Messages endpoint and
# hand the HTTP::Tiny response hashref to _handle_response().
sub _request {
    my ($self, $body) = @_;
    my $endpoint = $self->{api_base} . '/v1/messages';
    my $payload  = $self->{_json}->encode($body);
    my $http_res = $self->{_http}->post(
        $endpoint,
        {
            headers => $self->_headers,
            content => $payload,
        },
    );
    return $self->_handle_response($http_res);
}
# Streaming variant of _request(): enables SSE streaming, feeds each
# text delta to $callback as it arrives, and returns a Response object
# holding the accumulated text.
#
# Fix: SSE events arriving via HTTP::Tiny's data_callback are NOT
# guaranteed to be aligned on line boundaries — a "data: {...}" line can
# be split across two chunks. The previous split(/\n/, $chunk) per chunk
# would then try to JSON-decode half an event (silently dropped by the
# eval) and mis-parse the other half. We now keep a cross-chunk buffer
# and only process newline-terminated lines.
sub _stream_request {
    my ($self, $body, $callback) = @_;
    $body->{stream} = \1;    # JSON true

    my $full_text = '';
    my $response_data;
    my $buffer = '';         # partial SSE line carried between chunks

    my $response = $self->{_http}->post(
        $self->{api_base} . '/v1/messages',
        {
            headers       => $self->_headers,
            content       => $self->{_json}->encode($body),
            data_callback => sub {
                my ($chunk, $res) = @_;
                $buffer .= $chunk;
                # Consume only complete (newline-terminated) lines; the
                # trailing partial line stays buffered for the next chunk.
                while ($buffer =~ s/\A([^\n]*)\n//) {
                    my $line = $1;
                    next unless $line =~ /^data: (.+)/;
                    my $data = $1;
                    next if $data eq '[DONE]';
                    eval {
                        my $event = $self->{_json}->decode($data);
                        my $type  = $event->{type} // '';
                        if ($type eq 'content_block_delta') {
                            my $text = $event->{delta}{text} // '';
                            $full_text .= $text;
                            $callback->($text) if $callback;
                        }
                        elsif ($type eq 'message_stop') {
                            $response_data = $event;
                        }
                    };
                }
            },
        }
    );

    # On transport/API failure, delegate to the shared error path (croaks).
    unless ($response->{success}) {
        return $self->_handle_response($response);
    }

    # Return a response object with the full accumulated text.
    return AI::Anthropic::Response->new(
        text         => $full_text,
        raw_response => $response_data,
    );
}
# Build the standard request headers: JSON content type, the API key,
# and the pinned anthropic-version (API_VERSION constant).
sub _headers {
    my ($self) = @_;
    my %headers = (
        'Content-Type'      => 'application/json',
        'x-api-key'         => $self->{api_key},
        'anthropic-version' => API_VERSION,
    );
    return \%headers;
}
# Turn an HTTP::Tiny response hashref into an AI::Anthropic::Response,
# croaking on HTTP/API errors.
#
# Fix: previously a successful (2xx) response whose body failed to
# JSON-decode fell through and produced a Response with empty text,
# silently hiding the problem. We now croak with the decode error.
sub _handle_response {
    my ($self, $response) = @_;

    my $data;
    eval {
        $data = $self->{_json}->decode($response->{content});
    };
    my $decode_error = $@;

    unless ($response->{success}) {
        # Prefer the structured API error message when the body parsed.
        my $error_msg = $data->{error}{message} // $response->{content} // 'Unknown error';
        croak "Anthropic API error: $error_msg (status: $response->{status})";
    }

    if (!defined $data) {
        croak "Anthropic API returned unparseable response body: $decode_error";
    }

    return AI::Anthropic::Response->new(
        text         => $data->{content}[0]{text} // '',
        role         => $data->{role},
        model        => $data->{model},
        stop_reason  => $data->{stop_reason},
        usage        => $data->{usage},
        raw_response => $data,
    );
}
# ============================================
# Response class
# ============================================
package AI::Anthropic::Response;
use strict;
lib/AI/Anthropic.pm view on Meta::CPAN
# Constructor: store the supplied key/value fields in a blessed hashref.
sub new {
    my ($class, %fields) = @_;
    my $self = { %fields };
    return bless $self, $class;
}

# Read-only accessors over the underlying fields.
# NOTE(review): the README and tests rely on string overloading
# ('"$response"' yielding text) — presumably declared near the package
# header, which is not visible in this fragment; confirm.
sub text         { return $_[0]->{text} }
sub role         { return $_[0]->{role} }
sub model        { return $_[0]->{model} }
sub stop_reason  { return $_[0]->{stop_reason} }
sub usage        { return $_[0]->{usage} }
sub raw_response { return $_[0]->{raw_response} }

# Token counts default to 0 when usage information is absent.
sub input_tokens  { return $_[0]->{usage}{input_tokens}  // 0 }
sub output_tokens { return $_[0]->{usage}{output_tokens} // 0 }

# Combined prompt + completion token count.
sub total_tokens {
    my ($self) = @_;
    return $self->input_tokens + $self->output_tokens;
}
1;
lib/AI/Anthropic.pm view on Meta::CPAN
=head2 Basic usage
use AI::Anthropic;
my $claude = AI::Anthropic->new;
print $claude->message("Hello, Claude!");
=head2 With image (vision)
my $response = $claude->chat(
messages => [
{
role => 'user',
content => [
{ type => 'text', text => 'What is in this image?' },
{ type => 'image', path => '/path/to/image.jpg' },
],
},
],
);
=head2 Tool use (function calling)
my $response = $claude->chat(
messages => [
{ role => 'user', content => 'What is the weather in London?' },
],
tools => [
{
name => 'get_weather',
description => 'Get current weather for a location',
input_schema => {
type => 'object',
properties => {
t/01-basic.t view on Meta::CPAN
}
# Test models list
{
    my $claude = AI::Anthropic->new(api_key => 'test');
    my @models = $claude->models;
    ok(@models > 0, 'models() returns list');
    # Fix: in the original, ok(grep { /claude/ } @models, 'desc') let grep
    # consume the description as part of its LIST — and since the string
    # 'models contain claude' itself matches /claude/, the test could
    # never fail. Force scalar context and keep the description as ok()'s
    # own second argument.
    ok(scalar(grep { /claude/ } @models), 'models contain claude');
}
# Exercise the AI::Anthropic::Response accessors and stringification.
{
    my $res = AI::Anthropic::Response->new(
        text        => 'Hello!',
        model       => 'claude-sonnet-4-20250514',
        stop_reason => 'end_turn',
        usage       => { input_tokens => 10, output_tokens => 5 },
    );

    is($res->text, 'Hello!', 'Response text');
    is($res->model, 'claude-sonnet-4-20250514', 'Response model');
    is($res->input_tokens, 10, 'Input tokens');
    is($res->output_tokens, 5, 'Output tokens');
    is($res->total_tokens, 15, 'Total tokens');

    # The object should stringify to its text via overload.
    is("$res", 'Hello!', 'Response stringifies to text');
}
done_testing();
( run in 0.306 second using v1.01-cache-2.11-cpan-62ea2d55848 )