|
12 | 12 | # See the License for the specific language governing permissions and |
13 | 13 | # limitations under the License. |
14 | 14 |
|
| 15 | +import datetime |
15 | 16 | import operator |
16 | 17 | import time |
17 | 18 |
|
18 | 19 | import unittest2 |
19 | 20 |
|
20 | 21 | from gcloud import _helpers |
| 22 | +from gcloud._helpers import _datetime_from_microseconds |
| 23 | +from gcloud._helpers import _microseconds_from_datetime |
| 24 | +from gcloud._helpers import UTC |
21 | 25 | from gcloud.bigtable.client import Client |
22 | 26 | from gcloud.bigtable.column_family import MaxVersionsGCRule |
| 27 | +from gcloud.bigtable.row_data import Cell |
23 | 28 | from gcloud.environment_vars import TESTS_PROJECT |
24 | 29 |
|
25 | 30 |
|
|
30 | 35 | TABLE_ID = 'gcloud-python-test-table' |
31 | 36 | COLUMN_FAMILY_ID1 = u'col-fam-id1' |
32 | 37 | COLUMN_FAMILY_ID2 = u'col-fam-id2' |
| 38 | +COL_NAME1 = b'col-name1' |
| 39 | +COL_NAME2 = b'col-name2' |
| 40 | +COL_NAME3 = b'col-name3-but-other-fam' |
| 41 | +CELL_VAL1 = b'cell-val' |
| 42 | +CELL_VAL2 = b'cell-val-newer' |
| 43 | +CELL_VAL3 = b'altcol-cell-val' |
| 44 | +CELL_VAL4 = b'foo' |
| 45 | +ROW_KEY = b'row-key' |
33 | 46 | EXISTING_CLUSTERS = [] |
34 | 47 | EXPECTED_ZONES = ( |
35 | 48 | 'asia-east1-b', |
@@ -292,3 +305,82 @@ def test_delete_column_family(self): |
292 | 305 | column_family.delete() |
293 | 306 | # Make sure we have successfully deleted it. |
294 | 307 | self.assertEqual(temp_table.list_column_families(), {}) |
| 308 | + |
| 309 | + |
class TestDataAPI(unittest2.TestCase):
    """System tests for writing and reading back cell data in a table."""

    @classmethod
    def setUpClass(cls):
        # One table (with both column families) is shared by every test
        # in this class.
        cls._table = table = Config.CLUSTER.table(TABLE_ID)
        table.create()
        table.column_family(COLUMN_FAMILY_ID1).create()
        table.column_family(COLUMN_FAMILY_ID2).create()

    @classmethod
    def tearDownClass(cls):
        # Will also delete any data contained in the table.
        cls._table.delete()

    def setUp(self):
        # Rows appended here get purged after each test method.
        self.rows_to_delete = []

    def tearDown(self):
        for row in self.rows_to_delete:
            row.clear_mutations()
            row.delete()
            row.commit()

    def _write_to_row(self, row1=None, row2=None, row3=None, row4=None):
        """Stage ``set_cell`` mutations on up to four rows.

        Timestamps start from "now" truncated to millisecond granularity
        (the server only stores milliseconds) and are spaced one
        millisecond apart.  Returns the four ``Cell`` values the server
        is expected to hold once the rows are committed.
        """
        now = datetime.datetime.utcnow().replace(tzinfo=UTC)
        micros = _microseconds_from_datetime(now)
        # Truncate to millisecond granularity.
        micros -= micros % 1000
        base_stamp = _datetime_from_microseconds(micros)
        # 1000 microseconds is a millisecond; four stamps, 1ms apart.
        stamps = [base_stamp + datetime.timedelta(microseconds=1000 * index)
                  for index in range(4)]
        # (row, column family, column, value) for each staged mutation.
        specs = [
            (row1, COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL1),
            (row2, COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL2),
            (row3, COLUMN_FAMILY_ID1, COL_NAME2, CELL_VAL3),
            (row4, COLUMN_FAMILY_ID2, COL_NAME3, CELL_VAL4),
        ]
        for (row, family_id, column, value), stamp in zip(specs, stamps):
            if row is not None:
                row.set_cell(family_id, column, value, timestamp=stamp)

        # Create the cells we will check.
        expected_cells = [Cell(value, stamp)
                          for (_, _, _, value), stamp in zip(specs, stamps)]
        return tuple(expected_cells)

    def test_read_row(self):
        row = self._table.row(ROW_KEY)
        self.rows_to_delete.append(row)

        cell1, cell2, cell3, cell4 = self._write_to_row(row, row, row, row)
        row.commit()

        # Read back the contents of the row.
        partial_row_data = self._table.read_row(ROW_KEY)
        self.assertTrue(partial_row_data.committed)
        self.assertEqual(partial_row_data.row_key, ROW_KEY)

        # Cells within a single column come back newest-first.
        newest_first = sorted([cell1, cell2],
                              key=lambda cell: cell.timestamp,
                              reverse=True)
        expected_row_contents = {
            COLUMN_FAMILY_ID1: {
                COL_NAME1: newest_first,
                COL_NAME2: [cell3],
            },
            COLUMN_FAMILY_ID2: {
                COL_NAME3: [cell4],
            },
        }
        self.assertEqual(partial_row_data.cells, expected_row_contents)
0 commit comments