Google-BigQuery

README.md

    # insert
    my $values = [];
    for (my $id = 101; $id <= 103; $id++) {
      push @$values, { id => $id, name => "name-${id}" };
    }
    $bq->insert(
      table_id => $table_id,
      values => $values,
    );

    # The first time a streaming insert occurs, the streamed data is inaccessible for a warm-up period of up to two minutes.
    sleep(120);

    # selectrow_array
    my ($count) = $bq->selectrow_array(query => "SELECT COUNT(*) FROM $table_id");
    print $count, "\n"; # 103

    # selectall_arrayref
    my $aref = $bq->selectall_arrayref(query => "SELECT * FROM $table_id ORDER BY id");
    foreach my $ref (@$aref) {
      print join("\t", @$ref), "\n";
    }

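The fixed sleep(120) above is a conservative way to wait out the streaming warm-up. A lighter-weight alternative (a sketch, not part of the module's API; it only reuses selectrow_array and the $bq/$table_id variables from the synopsis) is to poll the row count until the streamed records become visible:

    # Poll for the streamed rows instead of sleeping a fixed 120 seconds.
    my $expected = 103;    # total rows once the streamed insert above has landed
    for my $try (1 .. 24) {
      my ($n) = $bq->selectrow_array(query => "SELECT COUNT(*) FROM $table_id");
      last if defined $n && $n >= $expected;
      sleep(10);           # retry for up to ~4 minutes in total
    }
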
README.md

          quote => $quote,                          # optional
          schema => $schema,                        # optional
          skipLeadingRows => $skipLeadingRows,      # optional
          sourceFormat => $sourceFormat,            # optional
          writeDisposition => $writeDisposition,    # optional
        );
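
    The optional keys above map onto fields of a BigQuery load-job configuration. As a rough illustration (option values only; the start of the call and its required arguments are cut off in the snippet above, and the schema shape shown here is an assumption), typical CSV settings look like this:

        # Illustrative option values for a CSV load; not a complete call.
        my %csv_load_options = (
          quote            => '"',                 # CSV quote character
          schema           => [                    # assumed field-definition shape
            { name => 'id',   type => 'INTEGER' },
            { name => 'name', type => 'STRING'  },
          ],
          skipLeadingRows  => 1,                   # skip a CSV header row
          sourceFormat     => 'CSV',               # or 'NEWLINE_DELIMITED_JSON'
          writeDisposition => 'WRITE_APPEND',      # or 'WRITE_TRUNCATE' / 'WRITE_EMPTY'
        );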

- insert

    Streams data into BigQuery one record at a time without needing to run a load job.
    See details at https://cloud.google.com/bigquery/streaming-data-into-bigquery.

        $bq->insert(                    # return 1 (success) or 0 (error)
          project_id => $project_id,    # required if default project is not set
          dataset_id => $dataset_id,    # required if default dataset is not set
          table_id => $table_id,        # required
          values => \@values,           # required
        );
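
    For example, a minimal sketch assuming a handle with a default project and dataset already selected, a placeholder table name (sample_table), and a schema with id and name columns:

        my @values;
        push @values, { id => $_, name => "name-$_" } for 1 .. 3;
        my $ok = $bq->insert(
          table_id => 'sample_table',    # placeholder table name
          values   => \@values,
        );
        warn "streaming insert failed\n" unless $ok;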

- selectrow_array

lib/Google/BigQuery.pm

    # insert
    my $values = [];
    for (my $id = 101; $id <= 103; $id++) {
      push @$values, { id => $id, name => "name-${id}" };
    }
    $bq->insert(
      table_id => $table_id,
      values => $values,
    );

    # The first time a streaming insert occurs, the streamed data is inaccessible for a warm-up period of up to two minutes.
    sleep(120);

    # selectrow_array
    my ($count) = $bq->selectrow_array(query => "SELECT COUNT(*) FROM $table_id");
    print $count, "\n"; # 103

    # selectall_arrayref
    my $aref = $bq->selectall_arrayref(query => "SELECT * FROM $table_id ORDER BY id");
    foreach my $ref (@$aref) {
      print join("\t", @$ref), "\n";
    }

lib/Google/BigQuery.pm

    quote => $quote,                          # optional
    schema => $schema,                        # optional
    skipLeadingRows => $skipLeadingRows,      # optional
    sourceFormat => $sourceFormat,            # optional
    writeDisposition => $writeDisposition,    # optional
  );

=item * insert

Streams data into BigQuery one record at a time without needing to run a load job.
See details at https://cloud.google.com/bigquery/streaming-data-into-bigquery.

  $bq->insert(                    # return 1 (success) or 0 (error)
    project_id => $project_id,    # required if default project is not set
    dataset_id => $dataset_id,    # required if default dataset is not set
    table_id => $table_id,        # required
    values => \@values,           # required
  );
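
For larger inserts, one pattern (an illustrative sketch, not an API of this module) is to split the rows into smaller streaming requests; the 500-row batch size and the @rows data below are arbitrary choices for the example:

  # Assumes a $bq handle with default project/dataset set and an existing $table_id.
  my @rows;
  push @rows, { id => $_, name => "name-$_" } for 1 .. 2000;
  while (my @chunk = splice(@rows, 0, 500)) {
    $bq->insert(table_id => $table_id, values => \@chunk)
      or warn "insert failed for a batch of ", scalar(@chunk), " rows\n";
  }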

=item * selectrow_array

t/synopsis.pl

# insert
my $values = [];
for (my $id = 101; $id <= 103; $id++) {
  push @$values, { id => $id, name => "name-${id}" };
}
$bq->insert(
  table_id => $table_id,
  values => $values,
);

# The first time a streaming insert occurs, the streamed data is inaccessible for a warm-up period of up to two minutes.
sleep(120); 

# selectrow_array
my ($count) = $bq->selectrow_array(query => "SELECT COUNT(*) FROM $table_id");
print $count, "\n"; # 103

# selectall_arrayref
my $aref = $bq->selectall_arrayref(query => "SELECT * FROM $table_id ORDER BY id");
foreach my $ref (@$aref) {
  print join("\t", @$ref), "\n";
}


