@@ -227,6 +227,23 @@ Update all writable metadata for a table
227227 ... SchemaField(name='age', type='int', mode='required')]
228228 >>> table.update() # API request
229229
230+ Get rows from a table's data:
231+
232+ .. doctest::
233+
234+ >>> from gcloud import bigquery
235+ >>> client = bigquery.Client()
236+ >>> dataset = client.dataset('dataset_name')
237+ >>> table = dataset.table(name='person_ages')
238+ >>> rows, next_page_token = table.data(max_results=100)  # API request
239+ >>> rows.csv.headers
240+ ('full_name', 'age')
241+ >>> list (rows.csv)
242+ [('Abel Adamson', 27), ('Beverly Bowman', 33)]
243+ >>> for row in rows:
244+ ... for field, value in zip (table.schema, row):
245+ ... do_something(field, value)
246+
230247Delete a table:
231248
232249.. doctest::
@@ -307,7 +324,7 @@ Background a query, loading the results into a table:
307324 >>> job.job_id
308325 'e3344fba-09df-4ae0-8337-fddee34b3840'
309326 >>> job.type
310- 'load '
327+ 'query'
311328 >>> job.created
312329 None
313330 >>> job.state
@@ -377,8 +394,8 @@ Inserting data (asynchronous)
377394~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
378395
379396Start a job loading data asynchronously from a set of CSV files, located on
380- GCloud Storage, appending rows into an existing table. First, create the job
381- locally:
397+ Google Cloud Storage, appending rows into an existing table. First, create
398+ the job locally:
382399
383400.. doctest::
384401
@@ -429,3 +446,112 @@ Poll until the job is complete:
429446 'done'
430447 >>> job.ended
431448 datetime.datetime(2015, 7, 23, 9, 30, 21, 334792, tzinfo=<UTC>)
449+
450+ Exporting data (async)
451+ ~~~~~~~~~~~~~~~~~~~~~~
452+
453+ Start a job exporting a table's data asynchronously to a set of CSV files,
454+ located on Google Cloud Storage. First, create the job locally:
455+
456+ .. doctest::
457+
458+ >>> from gcloud import bigquery
459+ >>> client = bigquery.Client()
460+ >>> table = dataset.table(name='person_ages')
461+ >>> job = table.export_to_storage(bucket_name='bucket-name',
462+ ...                               object_name_glob='export-prefix*.csv',
463+ ...                               destination_format='CSV',
464+ ...                               print_header=1,
465+ ...                               write_disposition='truncate')
466+ >>> job.job_id
467+ 'e3344fba-09df-4ae0-8337-fddee34b3840'
468+ >>> job.type
469+ 'extract'
470+ >>> job.created
471+ None
472+ >>> job.state
473+ None
474+
475+ .. note::
476+
477+ - ``gcloud.bigquery`` generates a UUID for each job.
478+ - The ``created`` and ``state`` fields are not set until the job
479+ is submitted to the BigQuery back-end.
480+
481+ Then, begin executing the job on the server:
482+
483+ .. doctest::
484+
485+ >>> job.submit() # API call
486+ >>> job.created
487+ datetime.datetime(2015, 7, 23, 9, 30, 20, 268260, tzinfo=<UTC>)
488+ >>> job.state
489+ 'running'
490+
491+ Poll until the job is complete:
492+
493+ .. doctest::
494+
495+ >>> import time
496+ >>> retry_count = 100
497+ >>> while retry_count > 0 and job.state == 'running':
498+ ...     retry_count -= 1
499+ ...     time.sleep(10)
500+ ... job.reload() # API call
501+ >>> job.state
502+ 'done'
503+ >>> job.ended
504+ datetime.datetime(2015, 7, 23, 9, 30, 21, 334792, tzinfo=<UTC>)
505+
506+
507+ Copy tables (async)
508+ ~~~~~~~~~~~~~~~~~~~
509+
510+ First, create the job locally:
511+
512+ .. doctest::
513+
514+ >>> from gcloud import bigquery
515+ >>> client = bigquery.Client()
516+ >>> source_table = dataset.table(name='person_ages')
517+ >>> destination_table = dataset.table(name='person_ages_copy')
518+ >>> job = source_table.copy_to(destination_table) # API request
519+ >>> job.job_id
520+ 'e3344fba-09df-4ae0-8337-fddee34b3840'
521+ >>> job.type
522+ 'copy'
523+ >>> job.created
524+ None
525+ >>> job.state
526+ None
527+
528+ .. note::
529+
530+ - ``gcloud.bigquery`` generates a UUID for each job.
531+ - The ``created`` and ``state`` fields are not set until the job
532+ is submitted to the BigQuery back-end.
533+
534+ Then, begin executing the job on the server:
535+
536+ .. doctest::
537+
538+ >>> job.submit() # API call
539+ >>> job.created
540+ datetime.datetime(2015, 7, 23, 9, 30, 20, 268260, tzinfo=<UTC>)
541+ >>> job.state
542+ 'running'
543+
544+ Poll until the job is complete:
545+
546+ .. doctest::
547+
548+ >>> import time
549+ >>> retry_count = 100
550+ >>> while retry_count > 0 and job.state == ' running' :
551+ ... retry_count -= 1
552+ ... time.sleep(10 )
553+ ... job.reload() # API call
554+ >>> job.state
555+ 'done'
556+ >>> job.ended
557+ datetime.datetime(2015, 7, 23, 9, 30, 21, 334792, tzinfo=<UTC>)
0 commit comments