From b4a8eb97a68b4c7d1bc9faf0b113dca4476d9f1f Mon Sep 17 00:00:00 2001
From: Christopher Wilcox
Date: Tue, 14 Jul 2020 10:40:14 -0700
Subject: [PATCH] feat!: remove v1beta1 surface for v2 (#96)

* remove v1beta1 code
* remove v1beta1 unit tests
* remove v1beta1 gapic tests
---
 google/cloud/firestore_v1beta1/__init__.py | 149 -
 google/cloud/firestore_v1beta1/_helpers.py | 1000 -------
 google/cloud/firestore_v1beta1/batch.py | 164 -
 google/cloud/firestore_v1beta1/client.py | 546 ----
 google/cloud/firestore_v1beta1/collection.py | 482 ---
 google/cloud/firestore_v1beta1/document.py | 787 -----
 google/cloud/firestore_v1beta1/field_path.py | 386 ---
 google/cloud/firestore_v1beta1/order.py | 207 --
 google/cloud/firestore_v1beta1/py.typed | 2 -
 google/cloud/firestore_v1beta1/query.py | 969 ------
 .../firestore_v1beta1/services/__init__.py | 16 -
 .../services/firestore/__init__.py | 24 -
 .../services/firestore/async_client.py | 946 ------
 .../services/firestore/client.py | 1059 -------
 .../services/firestore/pagers.py | 149 -
 .../services/firestore/transports/__init__.py | 36 -
 .../services/firestore/transports/base.py | 222 --
 .../services/firestore/transports/grpc.py | 555 ----
 .../firestore/transports/grpc_asyncio.py | 561 ----
 google/cloud/firestore_v1beta1/transaction.py | 415 ---
 google/cloud/firestore_v1beta1/transforms.py | 90 -
 .../cloud/firestore_v1beta1/types/__init__.py | 109 -
 .../cloud/firestore_v1beta1/types/common.py | 112 -
 .../cloud/firestore_v1beta1/types/document.py | 195 --
 .../firestore_v1beta1/types/firestore.py | 916 ------
 google/cloud/firestore_v1beta1/types/query.py | 298 --
 google/cloud/firestore_v1beta1/types/write.py | 376 ---
 google/cloud/firestore_v1beta1/watch.py | 723 -----
 .../test_firestore_v1beta1.py | 2632 -----------------
 tests/unit/v1beta1/__init__.py | 13 -
 tests/unit/v1beta1/_test_cross_language.py | 503 ----
 tests/unit/v1beta1/test__helpers.py | 2087 -------------
 tests/unit/v1beta1/test_batch.py | 280 --
 tests/unit/v1beta1/test_client.py | 677 -----
 tests/unit/v1beta1/test_collection.py | 605 ----
 tests/unit/v1beta1/test_document.py | 839 ------
 tests/unit/v1beta1/test_field_path.py | 495 ----
 tests/unit/v1beta1/test_order.py | 247 --
 tests/unit/v1beta1/test_query.py | 1601 ----------
 tests/unit/v1beta1/test_transaction.py | 1047 -------
 tests/unit/v1beta1/test_transforms.py | 65 -
 tests/unit/v1beta1/test_watch.py | 849 ------
 .../testdata/create-all-transforms.textproto | 64 -
 .../create-arrayremove-multi.textproto | 61 -
 .../create-arrayremove-nested.textproto | 48 -
 ...reate-arrayremove-noarray-nested.textproto | 12 -
 .../create-arrayremove-noarray.textproto | 12 -
 .../create-arrayremove-with-st.textproto | 12 -
 .../testdata/create-arrayremove.textproto | 47 -
 .../create-arrayunion-multi.textproto | 61 -
 .../create-arrayunion-nested.textproto | 48 -
 ...create-arrayunion-noarray-nested.textproto | 12 -
 .../create-arrayunion-noarray.textproto | 12 -
 .../create-arrayunion-with-st.textproto | 12 -
 .../testdata/create-arrayunion.textproto | 47 -
 .../v1beta1/testdata/create-basic.textproto | 27 -
 .../v1beta1/testdata/create-complex.textproto | 61 -
 .../create-del-noarray-nested.textproto | 13 -
 .../testdata/create-del-noarray.textproto | 13 -
 .../v1beta1/testdata/create-empty.textproto | 20 -
 .../v1beta1/testdata/create-nodel.textproto | 11 -
 .../v1beta1/testdata/create-nosplit.textproto | 40 -
 .../testdata/create-special-chars.textproto | 41 -
 .../testdata/create-st-alone.textproto | 26 -
 .../testdata/create-st-multi.textproto | 41 -
.../testdata/create-st-nested.textproto | 38 - .../create-st-noarray-nested.textproto | 12 - .../testdata/create-st-noarray.textproto | 12 - .../create-st-with-empty-map.textproto | 45 - .../unit/v1beta1/testdata/create-st.textproto | 39 - .../testdata/delete-exists-precond.textproto | 21 - .../testdata/delete-no-precond.textproto | 15 - .../testdata/delete-time-precond.textproto | 25 - .../unit/v1beta1/testdata/get-basic.textproto | 12 - .../testdata/listen-add-mod-del-add.textproto | 246 -- .../v1beta1/testdata/listen-add-one.textproto | 79 - .../testdata/listen-add-three.textproto | 190 -- .../testdata/listen-doc-remove.textproto | 115 - .../v1beta1/testdata/listen-empty.textproto | 25 - .../testdata/listen-filter-nop.textproto | 247 -- .../testdata/listen-multi-docs.textproto | 524 ---- .../testdata/listen-nocurrent.textproto | 141 - .../v1beta1/testdata/listen-nomod.textproto | 143 - .../listen-removed-target-ids.textproto | 131 - .../v1beta1/testdata/listen-reset.textproto | 382 --- .../testdata/listen-target-add-nop.textproto | 88 - .../listen-target-add-wrong-id.textproto | 50 - .../testdata/listen-target-remove.textproto | 46 - .../query-arrayremove-cursor.textproto | 23 - .../query-arrayremove-where.textproto | 19 - .../query-arrayunion-cursor.textproto | 23 - .../testdata/query-arrayunion-where.textproto | 19 - .../v1beta1/testdata/query-bad-NaN.textproto | 19 - .../v1beta1/testdata/query-bad-null.textproto | 19 - .../query-cursor-docsnap-order.textproto | 68 - ...uery-cursor-docsnap-orderby-name.textproto | 76 - .../query-cursor-docsnap-where-eq.textproto | 53 - ...cursor-docsnap-where-neq-orderby.textproto | 72 - .../query-cursor-docsnap-where-neq.textproto | 64 - .../testdata/query-cursor-docsnap.textproto | 34 - ...query-cursor-endbefore-empty-map.textproto | 41 - .../query-cursor-endbefore-empty.textproto | 23 - .../testdata/query-cursor-no-order.textproto | 16 - .../query-cursor-startat-empty-map.textproto | 41 - .../query-cursor-startat-empty.textproto | 23 - .../testdata/query-cursor-vals-1a.textproto | 50 - .../testdata/query-cursor-vals-1b.textproto | 48 - .../testdata/query-cursor-vals-2.textproto | 71 - .../query-cursor-vals-docid.textproto | 50 - .../query-cursor-vals-last-wins.textproto | 60 - .../testdata/query-del-cursor.textproto | 23 - .../testdata/query-del-where.textproto | 19 - .../testdata/query-invalid-operator.textproto | 19 - .../query-invalid-path-order.textproto | 19 - .../query-invalid-path-select.textproto | 18 - .../query-invalid-path-where.textproto | 20 - .../query-offset-limit-last-wins.textproto | 30 - .../testdata/query-offset-limit.textproto | 24 - .../v1beta1/testdata/query-order.textproto | 42 - .../testdata/query-select-empty.textproto | 23 - .../testdata/query-select-last-wins.textproto | 36 - .../v1beta1/testdata/query-select.textproto | 32 - .../testdata/query-st-cursor.textproto | 23 - .../v1beta1/testdata/query-st-where.textproto | 19 - .../v1beta1/testdata/query-where-2.textproto | 59 - .../testdata/query-where-NaN.textproto | 31 - .../testdata/query-where-null.textproto | 31 - .../v1beta1/testdata/query-where.textproto | 34 - .../testdata/query-wrong-collection.textproto | 19 - .../testdata/set-all-transforms.textproto | 61 - .../testdata/set-arrayremove-multi.textproto | 58 - .../testdata/set-arrayremove-nested.textproto | 45 - .../set-arrayremove-noarray-nested.textproto | 12 - .../set-arrayremove-noarray.textproto | 12 - .../set-arrayremove-with-st.textproto | 12 - .../testdata/set-arrayremove.textproto | 44 - 
.../testdata/set-arrayunion-multi.textproto | 58 - .../testdata/set-arrayunion-nested.textproto | 45 - .../set-arrayunion-noarray-nested.textproto | 12 - .../testdata/set-arrayunion-noarray.textproto | 12 - .../testdata/set-arrayunion-with-st.textproto | 12 - .../v1beta1/testdata/set-arrayunion.textproto | 44 - .../unit/v1beta1/testdata/set-basic.textproto | 24 - .../v1beta1/testdata/set-complex.textproto | 58 - .../testdata/set-del-merge-alone.textproto | 28 - .../v1beta1/testdata/set-del-merge.textproto | 37 - .../testdata/set-del-mergeall.textproto | 31 - .../testdata/set-del-noarray-nested.textproto | 13 - .../testdata/set-del-noarray.textproto | 13 - .../testdata/set-del-nomerge.textproto | 17 - .../testdata/set-del-nonleaf.textproto | 19 - .../testdata/set-del-wo-merge.textproto | 12 - .../unit/v1beta1/testdata/set-empty.textproto | 17 - .../v1beta1/testdata/set-merge-fp.textproto | 40 - .../testdata/set-merge-nested.textproto | 41 - .../testdata/set-merge-nonleaf.textproto | 46 - .../testdata/set-merge-prefix.textproto | 21 - .../testdata/set-merge-present.textproto | 20 - .../unit/v1beta1/testdata/set-merge.textproto | 32 - .../testdata/set-mergeall-empty.textproto | 23 - .../testdata/set-mergeall-nested.textproto | 45 - .../v1beta1/testdata/set-mergeall.textproto | 37 - .../unit/v1beta1/testdata/set-nodel.textproto | 11 - .../v1beta1/testdata/set-nosplit.textproto | 37 - .../testdata/set-special-chars.textproto | 38 - .../testdata/set-st-alone-mergeall.textproto | 26 - .../v1beta1/testdata/set-st-alone.textproto | 28 - .../testdata/set-st-merge-both.textproto | 45 - .../set-st-merge-nonleaf-alone.textproto | 37 - .../testdata/set-st-merge-nonleaf.textproto | 49 - .../testdata/set-st-merge-nowrite.textproto | 28 - .../testdata/set-st-mergeall.textproto | 40 - .../v1beta1/testdata/set-st-multi.textproto | 38 - .../v1beta1/testdata/set-st-nested.textproto | 35 - .../testdata/set-st-noarray-nested.textproto | 12 - .../v1beta1/testdata/set-st-noarray.textproto | 12 - .../v1beta1/testdata/set-st-nomerge.textproto | 33 - .../testdata/set-st-with-empty-map.textproto | 42 - tests/unit/v1beta1/testdata/set-st.textproto | 36 - .../unit/v1beta1/testdata/test-suite.binproto | Bin 55916 -> 0 bytes .../testdata/update-all-transforms.textproto | 67 - .../update-arrayremove-alone.textproto | 36 - .../update-arrayremove-multi.textproto | 69 - .../update-arrayremove-nested.textproto | 52 - ...pdate-arrayremove-noarray-nested.textproto | 12 - .../update-arrayremove-noarray.textproto | 12 - .../update-arrayremove-with-st.textproto | 12 - .../testdata/update-arrayremove.textproto | 50 - .../update-arrayunion-alone.textproto | 36 - .../update-arrayunion-multi.textproto | 69 - .../update-arrayunion-nested.textproto | 52 - ...update-arrayunion-noarray-nested.textproto | 12 - .../update-arrayunion-noarray.textproto | 12 - .../update-arrayunion-with-st.textproto | 12 - .../testdata/update-arrayunion.textproto | 50 - .../v1beta1/testdata/update-badchar.textproto | 12 - .../v1beta1/testdata/update-basic.textproto | 30 - .../v1beta1/testdata/update-complex.textproto | 65 - .../testdata/update-del-alone.textproto | 25 - .../v1beta1/testdata/update-del-dot.textproto | 46 - .../testdata/update-del-nested.textproto | 11 - .../update-del-noarray-nested.textproto | 13 - .../testdata/update-del-noarray.textproto | 13 - .../v1beta1/testdata/update-del.textproto | 32 - .../testdata/update-exists-precond.textproto | 14 - .../update-fp-empty-component.textproto | 11 - ...ested-transform-and-nested-value.textproto | 58 - 
.../testdata/update-no-paths.textproto | 11 - .../update-paths-all-transforms.textproto | 82 - .../update-paths-arrayremove-alone.textproto | 39 - .../update-paths-arrayremove-multi.textproto | 76 - .../update-paths-arrayremove-nested.textproto | 59 - ...paths-arrayremove-noarray-nested.textproto | 15 - ...update-paths-arrayremove-noarray.textproto | 15 - ...update-paths-arrayremove-with-st.textproto | 15 - .../update-paths-arrayremove.textproto | 57 - .../update-paths-arrayunion-alone.textproto | 39 - .../update-paths-arrayunion-multi.textproto | 76 - .../update-paths-arrayunion-nested.textproto | 59 - ...-paths-arrayunion-noarray-nested.textproto | 15 - .../update-paths-arrayunion-noarray.textproto | 15 - .../update-paths-arrayunion-with-st.textproto | 15 - .../update-paths-arrayunion.textproto | 57 - .../testdata/update-paths-basic.textproto | 33 - .../testdata/update-paths-complex.textproto | 72 - .../testdata/update-paths-del-alone.textproto | 28 - .../update-paths-del-nested.textproto | 14 - .../update-paths-del-noarray-nested.textproto | 16 - .../update-paths-del-noarray.textproto | 16 - .../testdata/update-paths-del.textproto | 39 - .../update-paths-exists-precond.textproto | 17 - .../testdata/update-paths-fp-del.textproto | 47 - .../update-paths-fp-dup-transforms.textproto | 23 - .../testdata/update-paths-fp-dup.textproto | 22 - .../update-paths-fp-empty-component.textproto | 15 - .../testdata/update-paths-fp-empty.textproto | 13 - .../testdata/update-paths-fp-multi.textproto | 42 - .../update-paths-fp-nosplit.textproto | 48 - .../testdata/update-paths-no-paths.textproto | 10 - .../testdata/update-paths-prefix-1.textproto | 19 - .../testdata/update-paths-prefix-2.textproto | 19 - .../testdata/update-paths-prefix-3.textproto | 20 - .../update-paths-special-chars.textproto | 53 - .../testdata/update-paths-st-alone.textproto | 29 - .../testdata/update-paths-st-multi.textproto | 56 - .../testdata/update-paths-st-nested.textproto | 49 - .../update-paths-st-noarray-nested.textproto | 15 - .../update-paths-st-noarray.textproto | 15 - .../update-paths-st-with-empty-map.textproto | 51 - .../testdata/update-paths-st.textproto | 49 - .../testdata/update-paths-uptime.textproto | 40 - .../testdata/update-prefix-1.textproto | 11 - .../testdata/update-prefix-2.textproto | 11 - .../testdata/update-prefix-3.textproto | 12 - .../v1beta1/testdata/update-quoting.textproto | 45 - .../testdata/update-split-top-level.textproto | 45 - .../v1beta1/testdata/update-split.textproto | 44 - .../testdata/update-st-alone.textproto | 26 - .../v1beta1/testdata/update-st-dot.textproto | 27 - .../testdata/update-st-multi.textproto | 49 - .../testdata/update-st-nested.textproto | 42 - .../update-st-noarray-nested.textproto | 12 - .../testdata/update-st-noarray.textproto | 12 - .../update-st-with-empty-map.textproto | 48 - .../unit/v1beta1/testdata/update-st.textproto | 42 - .../v1beta1/testdata/update-uptime.textproto | 37 - 266 files changed, 32643 deletions(-) delete mode 100644 google/cloud/firestore_v1beta1/__init__.py delete mode 100644 google/cloud/firestore_v1beta1/_helpers.py delete mode 100644 google/cloud/firestore_v1beta1/batch.py delete mode 100644 google/cloud/firestore_v1beta1/client.py delete mode 100644 google/cloud/firestore_v1beta1/collection.py delete mode 100644 google/cloud/firestore_v1beta1/document.py delete mode 100644 google/cloud/firestore_v1beta1/field_path.py delete mode 100644 google/cloud/firestore_v1beta1/order.py delete mode 100644 google/cloud/firestore_v1beta1/py.typed delete mode 
100644 google/cloud/firestore_v1beta1/query.py delete mode 100644 google/cloud/firestore_v1beta1/services/__init__.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/__init__.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/async_client.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/client.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/pagers.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/transports/__init__.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/transports/base.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/transports/grpc.py delete mode 100644 google/cloud/firestore_v1beta1/services/firestore/transports/grpc_asyncio.py delete mode 100644 google/cloud/firestore_v1beta1/transaction.py delete mode 100644 google/cloud/firestore_v1beta1/transforms.py delete mode 100644 google/cloud/firestore_v1beta1/types/__init__.py delete mode 100644 google/cloud/firestore_v1beta1/types/common.py delete mode 100644 google/cloud/firestore_v1beta1/types/document.py delete mode 100644 google/cloud/firestore_v1beta1/types/firestore.py delete mode 100644 google/cloud/firestore_v1beta1/types/query.py delete mode 100644 google/cloud/firestore_v1beta1/types/write.py delete mode 100644 google/cloud/firestore_v1beta1/watch.py delete mode 100644 tests/unit/gapic/firestore_v1beta1/test_firestore_v1beta1.py delete mode 100644 tests/unit/v1beta1/__init__.py delete mode 100644 tests/unit/v1beta1/_test_cross_language.py delete mode 100644 tests/unit/v1beta1/test__helpers.py delete mode 100644 tests/unit/v1beta1/test_batch.py delete mode 100644 tests/unit/v1beta1/test_client.py delete mode 100644 tests/unit/v1beta1/test_collection.py delete mode 100644 tests/unit/v1beta1/test_document.py delete mode 100644 tests/unit/v1beta1/test_field_path.py delete mode 100644 tests/unit/v1beta1/test_order.py delete mode 100644 tests/unit/v1beta1/test_query.py delete mode 100644 tests/unit/v1beta1/test_transaction.py delete mode 100644 tests/unit/v1beta1/test_transforms.py delete mode 100644 tests/unit/v1beta1/test_watch.py delete mode 100644 tests/unit/v1beta1/testdata/create-all-transforms.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayremove-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayremove-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayremove-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayremove-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayremove-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayremove.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayunion-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayunion-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayunion-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayunion-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayunion-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-arrayunion.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-basic.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-complex.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-del-noarray-nested.textproto delete mode 100644 
tests/unit/v1beta1/testdata/create-del-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-nodel.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-nosplit.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-special-chars.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-st-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-st-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-st-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-st-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-st-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-st-with-empty-map.textproto delete mode 100644 tests/unit/v1beta1/testdata/create-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/delete-exists-precond.textproto delete mode 100644 tests/unit/v1beta1/testdata/delete-no-precond.textproto delete mode 100644 tests/unit/v1beta1/testdata/delete-time-precond.textproto delete mode 100644 tests/unit/v1beta1/testdata/get-basic.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-add-mod-del-add.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-add-one.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-add-three.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-doc-remove.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-filter-nop.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-multi-docs.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-nocurrent.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-nomod.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-removed-target-ids.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-reset.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-target-add-nop.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-target-add-wrong-id.textproto delete mode 100644 tests/unit/v1beta1/testdata/listen-target-remove.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-arrayremove-cursor.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-arrayremove-where.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-arrayunion-cursor.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-arrayunion-where.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-bad-NaN.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-bad-null.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-docsnap-order.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-docsnap-orderby-name.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-docsnap-where-eq.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq-orderby.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-docsnap.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-endbefore-empty-map.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-endbefore-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-no-order.textproto delete mode 100644 
tests/unit/v1beta1/testdata/query-cursor-startat-empty-map.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-startat-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-vals-1a.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-vals-1b.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-vals-2.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-vals-docid.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-cursor-vals-last-wins.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-del-cursor.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-del-where.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-invalid-operator.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-invalid-path-order.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-invalid-path-select.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-invalid-path-where.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-offset-limit-last-wins.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-offset-limit.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-order.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-select-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-select-last-wins.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-select.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-st-cursor.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-st-where.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-where-2.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-where-NaN.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-where-null.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-where.textproto delete mode 100644 tests/unit/v1beta1/testdata/query-wrong-collection.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-all-transforms.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayremove-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayremove-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayremove-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayremove-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayremove-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayremove.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayunion-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayunion-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayunion-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayunion-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayunion-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-arrayunion.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-basic.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-complex.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-del-merge-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-del-merge.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-del-mergeall.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-del-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-del-noarray.textproto delete mode 
100644 tests/unit/v1beta1/testdata/set-del-nomerge.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-del-nonleaf.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-del-wo-merge.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-merge-fp.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-merge-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-merge-nonleaf.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-merge-prefix.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-merge-present.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-merge.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-mergeall-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-mergeall-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-mergeall.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-nodel.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-nosplit.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-special-chars.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-alone-mergeall.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-merge-both.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-merge-nonleaf-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-merge-nonleaf.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-merge-nowrite.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-mergeall.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-nomerge.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st-with-empty-map.textproto delete mode 100644 tests/unit/v1beta1/testdata/set-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/test-suite.binproto delete mode 100644 tests/unit/v1beta1/testdata/update-all-transforms.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayremove-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayremove-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayremove-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayremove-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayremove-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayremove-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayremove.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayunion-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayunion-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayunion-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayunion-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayunion-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayunion-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-arrayunion.textproto delete mode 100644 
tests/unit/v1beta1/testdata/update-badchar.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-basic.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-complex.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-del-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-del-dot.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-del-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-del-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-del-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-del.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-exists-precond.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-fp-empty-component.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-nested-transform-and-nested-value.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-no-paths.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-all-transforms.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayremove-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayremove-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayremove-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayremove-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayremove.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayunion-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayunion-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayunion-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayunion-with-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-arrayunion.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-basic.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-complex.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-del-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-del-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-del-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-del-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-del.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-exists-precond.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-fp-del.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-fp-dup-transforms.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-fp-dup.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-fp-empty-component.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-fp-empty.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-fp-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-fp-nosplit.textproto delete mode 100644 
tests/unit/v1beta1/testdata/update-paths-no-paths.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-prefix-1.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-prefix-2.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-prefix-3.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-special-chars.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-st-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-st-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-st-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-st-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-st-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-st-with-empty-map.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-paths-uptime.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-prefix-1.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-prefix-2.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-prefix-3.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-quoting.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-split-top-level.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-split.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st-alone.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st-dot.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st-multi.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st-noarray-nested.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st-noarray.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st-with-empty-map.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-st.textproto delete mode 100644 tests/unit/v1beta1/testdata/update-uptime.textproto diff --git a/google/cloud/firestore_v1beta1/__init__.py b/google/cloud/firestore_v1beta1/__init__.py deleted file mode 100644 index 8349c0e96..000000000 --- a/google/cloud/firestore_v1beta1/__init__.py +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - - -"""Python idiomatic client for Google Cloud Firestore.""" - -from pkg_resources import get_distribution - -__version__ = get_distribution("google-cloud-firestore").version - -from google.cloud.firestore_v1beta1 import types -from google.cloud.firestore_v1beta1._helpers import GeoPoint -from google.cloud.firestore_v1beta1._helpers import ExistsOption -from google.cloud.firestore_v1beta1._helpers import LastUpdateOption -from google.cloud.firestore_v1beta1._helpers import ReadAfterWriteError -from google.cloud.firestore_v1beta1._helpers import WriteOption -from google.cloud.firestore_v1beta1.batch import WriteBatch -from google.cloud.firestore_v1beta1.client import Client -from google.cloud.firestore_v1beta1.collection import CollectionReference -from google.cloud.firestore_v1beta1.transforms import ArrayRemove -from google.cloud.firestore_v1beta1.transforms import ArrayUnion -from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD -from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP -from google.cloud.firestore_v1beta1.document import DocumentReference -from google.cloud.firestore_v1beta1.document import DocumentSnapshot -from google.cloud.firestore_v1beta1.query import Query -from google.cloud.firestore_v1beta1.transaction import Transaction -from google.cloud.firestore_v1beta1.transaction import transactional -from google.cloud.firestore_v1beta1.watch import Watch - - -from .services.firestore import FirestoreClient -from .types.common import DocumentMask -from .types.common import Precondition -from .types.common import TransactionOptions -from .types.document import ArrayValue -from .types.document import Document -from .types.document import MapValue -from .types.document import Value -from .types.firestore import BatchGetDocumentsRequest -from .types.firestore import BatchGetDocumentsResponse -from .types.firestore import BeginTransactionRequest -from .types.firestore import BeginTransactionResponse -from .types.firestore import CommitRequest -from .types.firestore import CommitResponse -from .types.firestore import CreateDocumentRequest -from .types.firestore import DeleteDocumentRequest -from .types.firestore import GetDocumentRequest -from .types.firestore import ListCollectionIdsRequest -from .types.firestore import ListCollectionIdsResponse -from .types.firestore import ListDocumentsRequest -from .types.firestore import ListDocumentsResponse -from .types.firestore import ListenRequest -from .types.firestore import ListenResponse -from .types.firestore import RollbackRequest -from .types.firestore import RunQueryRequest -from .types.firestore import RunQueryResponse -from .types.firestore import Target -from .types.firestore import TargetChange -from .types.firestore import UpdateDocumentRequest -from .types.firestore import WriteRequest -from .types.firestore import WriteResponse -from .types.query import Cursor -from .types.query import StructuredQuery -from .types.write import DocumentChange -from .types.write import DocumentDelete -from .types.write import DocumentRemove -from .types.write import DocumentTransform -from .types.write import ExistenceFilter -from .types.write import Write -from .types.write import WriteResult - - -__all__ = ( - "ArrayValue", - "BatchGetDocumentsRequest", - "BatchGetDocumentsResponse", - "BeginTransactionRequest", - "BeginTransactionResponse", - "CommitRequest", - "CommitResponse", - "CreateDocumentRequest", - "Cursor", - "DeleteDocumentRequest", - "Document", - "DocumentChange", - "DocumentDelete", - 
"DocumentMask", - "DocumentRemove", - "DocumentTransform", - "ExistenceFilter", - "GetDocumentRequest", - "ListCollectionIdsRequest", - "ListCollectionIdsResponse", - "ListDocumentsRequest", - "ListDocumentsResponse", - "ListenRequest", - "ListenResponse", - "MapValue", - "Precondition", - "RollbackRequest", - "RunQueryRequest", - "RunQueryResponse", - "StructuredQuery", - "Target", - "TargetChange", - "TransactionOptions", - "UpdateDocumentRequest", - "Value", - "Write", - "WriteRequest", - "WriteResponse", - "WriteResult", - "FirestoreClient", - "__version__", - "ArrayRemove", - "ArrayUnion", - "Client", - "CollectionReference", - "DELETE_FIELD", - "DocumentReference", - "DocumentSnapshot", - "ExistsOption", - "GeoPoint", - "LastUpdateOption", - "Query", - "ReadAfterWriteError", - "SERVER_TIMESTAMP", - "Transaction", - "transactional", - "types", - "Watch", - "WriteBatch", - "WriteOption", -) diff --git a/google/cloud/firestore_v1beta1/_helpers.py b/google/cloud/firestore_v1beta1/_helpers.py deleted file mode 100644 index 6a192490e..000000000 --- a/google/cloud/firestore_v1beta1/_helpers.py +++ /dev/null @@ -1,1000 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Common helpers shared across Google Cloud Firestore modules.""" - -import datetime - -from google.protobuf import struct_pb2 -from google.type import latlng_pb2 -import grpc -import six - -from google.cloud import exceptions -from google.cloud._helpers import _datetime_to_pb_timestamp -from google.api_core.datetime_helpers import DatetimeWithNanoseconds -from google.cloud.firestore_v1beta1 import transforms -from google.cloud.firestore_v1beta1 import types -from google.cloud.firestore_v1beta1.field_path import FieldPath -from google.cloud.firestore_v1beta1.field_path import parse_field_path - -from google.cloud.firestore_v1beta1.types.write import DocumentTransform - -from google.cloud.firestore_v1beta1.types import common -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import write - - -BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}." -DOCUMENT_PATH_DELIMITER = "/" -INACTIVE_TXN = "Transaction not in progress, cannot be used in API requests." -READ_AFTER_WRITE_ERROR = "Attempted read after write in a transaction." -BAD_REFERENCE_ERROR = ( - "Reference value {!r} in unexpected format, expected to be of the form " - "``projects/{{project}}/databases/{{database}}/" - "documents/{{document_path}}``." -) -WRONG_APP_REFERENCE = ( - "Document {!r} does not correspond to the same database " "({!r}) as the client." -) -REQUEST_TIME_ENUM = DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME -_GRPC_ERROR_MAPPING = { - grpc.StatusCode.ALREADY_EXISTS: exceptions.Conflict, - grpc.StatusCode.NOT_FOUND: exceptions.NotFound, -} - - -class GeoPoint(object): - """Simple container for a geo point value. - - Args: - latitude (float): Latitude of a point. - longitude (float): Longitude of a point. 
- """ - - def __init__(self, latitude, longitude): - self.latitude = latitude - self.longitude = longitude - - def to_protobuf(self): - """Convert the current object to protobuf. - - Returns: - google.type.latlng_pb2.LatLng: The current point as a protobuf. - """ - return latlng_pb2.LatLng(latitude=self.latitude, longitude=self.longitude) - - def __eq__(self, other): - """Compare two geo points for equality. - - Returns: - Union[bool, NotImplemented]: :data:`True` if the points compare - equal, else :data:`False`. (Or :data:`NotImplemented` if - ``other`` is not a geo point.) - """ - if not isinstance(other, GeoPoint): - return NotImplemented - - return self.latitude == other.latitude and self.longitude == other.longitude - - def __ne__(self, other): - """Compare two geo points for inequality. - - Returns: - Union[bool, NotImplemented]: :data:`False` if the points compare - equal, else :data:`True`. (Or :data:`NotImplemented` if - ``other`` is not a geo point.) - """ - equality_val = self.__eq__(other) - if equality_val is NotImplemented: - return NotImplemented - else: - return not equality_val - - -def verify_path(path, is_collection): - """Verifies that a ``path`` has the correct form. - - Checks that all of the elements in ``path`` are strings. - - Args: - path (Tuple[str, ...]): The components in a collection or - document path. - is_collection (bool): Indicates if the ``path`` represents - a document or a collection. - - Raises: - ValueError: if - - * the ``path`` is empty - * ``is_collection=True`` and there are an even number of elements - * ``is_collection=False`` and there are an odd number of elements - * an element is not a string - """ - num_elements = len(path) - if num_elements == 0: - raise ValueError("Document or collection path cannot be empty") - - if is_collection: - if num_elements % 2 == 0: - raise ValueError("A collection must have an odd number of path elements") - else: - if num_elements % 2 == 1: - raise ValueError("A document must have an even number of path elements") - - for element in path: - if not isinstance(element, six.string_types): - msg = BAD_PATH_TEMPLATE.format(element, type(element)) - raise ValueError(msg) - - -def encode_value(value): - """Converts a native Python value into a Firestore protobuf ``Value``. - - Args: - value (Union[NoneType, bool, int, float, datetime.datetime, \ - str, bytes, dict, ~google.cloud.Firestore.GeoPoint]): A native - Python value to convert to a protobuf field. - - Returns: - ~google.cloud.firestore_v1beta1.types.Value: A - value encoded as a Firestore protobuf. - - Raises: - TypeError: If the ``value`` is not one of the accepted types. - """ - if value is None: - return document.Value(null_value=struct_pb2.NULL_VALUE) - - # Must come before six.integer_types since ``bool`` is an integer subtype. 
- if isinstance(value, bool): - return document.Value(boolean_value=value) - - if isinstance(value, six.integer_types): - return document.Value(integer_value=value) - - if isinstance(value, float): - return document.Value(double_value=value) - - if isinstance(value, DatetimeWithNanoseconds): - return document.Value(timestamp_value=value.timestamp_pb()) - - if isinstance(value, datetime.datetime): - return document.Value(timestamp_value=_datetime_to_pb_timestamp(value)) - - if isinstance(value, six.text_type): - return document.Value(string_value=value) - - if isinstance(value, six.binary_type): - return document.Value(bytes_value=value) - - # NOTE: We avoid doing an isinstance() check for a Document - # here to avoid import cycles. - document_path = getattr(value, "_document_path", None) - if document_path is not None: - return document.Value(reference_value=document_path) - - if isinstance(value, GeoPoint): - return document.Value(geo_point_value=value.to_protobuf()) - - if isinstance(value, list): - value_list = [encode_value(element) for element in value] - value_pb = document.ArrayValue(values=value_list) - return document.Value(array_value=value_pb) - - if isinstance(value, dict): - value_dict = encode_dict(value) - value_pb = document.MapValue(fields=value_dict) - return document.Value(map_value=value_pb) - - raise TypeError( - "Cannot convert to a Firestore Value", value, "Invalid type", type(value) - ) - - -def encode_dict(values_dict): - """Encode a dictionary into protobuf ``Value``-s. - - Args: - values_dict (dict): The dictionary to encode as protobuf fields. - - Returns: - Dict[str, ~google.cloud.firestore_v1beta1.types.Value]: A - dictionary of string keys and ``Value`` protobufs as dictionary - values. - """ - return {key: encode_value(value) for key, value in six.iteritems(values_dict)} - - -def reference_value_to_document(reference_value, client): - """Convert a reference value string to a document. - - Args: - reference_value (str): A document reference value. - client (~.firestore_v1beta1.client.Client): A client that has - a document factory. - - Returns: - ~.firestore_v1beta1.document.DocumentReference: The document - corresponding to ``reference_value``. - - Raises: - ValueError: If the ``reference_value`` is not of the expected - format: ``projects/{project}/databases/{database}/documents/...``. - ValueError: If the ``reference_value`` does not come from the same - project / database combination as the ``client``. - """ - # The first 5 parts are - # projects, {project}, databases, {database}, documents - parts = reference_value.split(DOCUMENT_PATH_DELIMITER, 5) - if len(parts) != 6: - msg = BAD_REFERENCE_ERROR.format(reference_value) - raise ValueError(msg) - - # The sixth part is `a/b/c/d` (i.e. the document path) - document = client.document(parts[-1]) - if document._document_path != reference_value: - msg = WRONG_APP_REFERENCE.format(reference_value, client._database_string) - raise ValueError(msg) - - return document - - -def decode_value(value, client): - """Converts a Firestore protobuf ``Value`` to a native Python value. - - Args: - value (google.cloud.firestore_v1beta1.types.Value): A - Firestore protobuf to be decoded / parsed / converted. - client (~.firestore_v1beta1.client.Client): A client that has - a document factory. - - Returns: - Union[NoneType, bool, int, float, datetime.datetime, \ - str, bytes, dict, ~google.cloud.Firestore.GeoPoint]: A native - Python value converted from the ``value``. 
- - Raises: - NotImplementedError: If the ``value_type`` is ``reference_value``. - ValueError: If the ``value_type`` is unknown. - """ - value_type = value._pb.WhichOneof("value_type") - - if value_type == "null_value": - return None - elif value_type == "boolean_value": - return value.boolean_value - elif value_type == "integer_value": - return value.integer_value - elif value_type == "double_value": - return value.double_value - elif value_type == "timestamp_value": - return DatetimeWithNanoseconds.from_timestamp_pb(value._pb.timestamp_value) - elif value_type == "string_value": - return value.string_value - elif value_type == "bytes_value": - return value.bytes_value - elif value_type == "reference_value": - return reference_value_to_document(value.reference_value, client) - elif value_type == "geo_point_value": - return GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude) - elif value_type == "array_value": - return [decode_value(element, client) for element in value.array_value.values] - elif value_type == "map_value": - return decode_dict(value.map_value.fields, client) - else: - raise ValueError("Unknown ``value_type``", value_type) - - -def decode_dict(value_fields, client): - """Converts a protobuf map of Firestore ``Value``-s. - - Args: - value_fields (google.protobuf.pyext._message.MessageMapContainer): A - protobuf map of Firestore ``Value``-s. - client (~.firestore_v1beta1.client.Client): A client that has - a document factory. - - Returns: - Dict[str, Union[NoneType, bool, int, float, datetime.datetime, \ - str, bytes, dict, ~google.cloud.Firestore.GeoPoint]]: A dictionary - of native Python values converted from the ``value_fields``. - """ - return { - key: decode_value(value, client) for key, value in six.iteritems(value_fields) - } - - -def get_doc_id(document_pb, expected_prefix): - """Parse a document ID from a document protobuf. - - Args: - document_pb (google.cloud.proto.firestore.v1beta1.\ - document.Document): A protobuf for a document that - was created in a ``CreateDocument`` RPC. - expected_prefix (str): The expected collection prefix for the - fully-qualified document name. - - Returns: - str: The document ID from the protobuf. - - Raises: - ValueError: If the name does not begin with the prefix. 
- """ - prefix, document_id = document_pb.name.rsplit(DOCUMENT_PATH_DELIMITER, 1) - if prefix != expected_prefix: - raise ValueError( - "Unexpected document name", - document_pb.name, - "Expected to begin with", - expected_prefix, - ) - - return document_id - - -_EmptyDict = transforms.Sentinel("Marker for an empty dict value") - - -def extract_fields(document_data, prefix_path, expand_dots=False): - """Do depth-first walk of tree, yielding field_path, value""" - if not document_data: - yield prefix_path, _EmptyDict - else: - for key, value in sorted(six.iteritems(document_data)): - - if expand_dots: - sub_key = FieldPath.from_string(key) - else: - sub_key = FieldPath(key) - - field_path = FieldPath(*(prefix_path.parts + sub_key.parts)) - - if isinstance(value, dict): - for s_path, s_value in extract_fields(value, field_path): - yield s_path, s_value - else: - yield field_path, value - - -def set_field_value(document_data, field_path, value): - """Set a value into a document for a field_path""" - current = document_data - for element in field_path.parts[:-1]: - current = current.setdefault(element, {}) - if value is _EmptyDict: - value = {} - current[field_path.parts[-1]] = value - - -def get_field_value(document_data, field_path): - if not field_path.parts: - raise ValueError("Empty path") - - current = document_data - for element in field_path.parts[:-1]: - current = current[element] - return current[field_path.parts[-1]] - - -class DocumentExtractor(object): - """ Break document data up into actual data and transforms. - - Handle special values such as ``DELETE_FIELD``, ``SERVER_TIMESTAMP``. - - Args: - document_data (dict): - Property names and values to use for sending a change to - a document. - """ - - def __init__(self, document_data): - self.document_data = document_data - self.field_paths = [] - self.deleted_fields = [] - self.server_timestamps = [] - self.array_removes = {} - self.array_unions = {} - self.set_fields = {} - self.empty_document = False - - prefix_path = FieldPath() - iterator = self._get_document_iterator(prefix_path) - - for field_path, value in iterator: - - if field_path == prefix_path and value is _EmptyDict: - self.empty_document = True - - elif value is transforms.DELETE_FIELD: - self.deleted_fields.append(field_path) - - elif value is transforms.SERVER_TIMESTAMP: - self.server_timestamps.append(field_path) - - elif isinstance(value, transforms.ArrayRemove): - self.array_removes[field_path] = value.values - - elif isinstance(value, transforms.ArrayUnion): - self.array_unions[field_path] = value.values - - else: - self.field_paths.append(field_path) - set_field_value(self.set_fields, field_path, value) - - def _get_document_iterator(self, prefix_path): - return extract_fields(self.document_data, prefix_path) - - @property - def has_transforms(self): - return bool(self.server_timestamps or self.array_removes or self.array_unions) - - @property - def transform_paths(self): - return sorted( - self.server_timestamps + list(self.array_removes) + list(self.array_unions) - ) - - def _get_update_mask(self, allow_empty_mask=False): - return None - - def get_update_pb(self, document_path, exists=None, allow_empty_mask=False): - - if exists is not None: - current_document = common.Precondition(exists=exists) - else: - current_document = None - - update_pb = write.Write( - update=document.Document( - name=document_path, fields=encode_dict(self.set_fields) - ), - update_mask=self._get_update_mask(allow_empty_mask), - current_document=current_document, - ) - - return 
update_pb - - def get_transform_pb(self, document_path, exists=None): - def make_array_value(values): - value_list = [encode_value(element) for element in values] - return document.ArrayValue(values=value_list) - - path_field_transforms = ( - [ - ( - path, - write.DocumentTransform.FieldTransform( - field_path=path.to_api_repr(), - set_to_server_value=REQUEST_TIME_ENUM, - ), - ) - for path in self.server_timestamps - ] - + [ - ( - path, - write.DocumentTransform.FieldTransform( - field_path=path.to_api_repr(), - remove_all_from_array=make_array_value(values), - ), - ) - for path, values in self.array_removes.items() - ] - + [ - ( - path, - write.DocumentTransform.FieldTransform( - field_path=path.to_api_repr(), - append_missing_elements=make_array_value(values), - ), - ) - for path, values in self.array_unions.items() - ] - ) - field_transforms = [ - transform for path, transform in sorted(path_field_transforms) - ] - transform_pb = write.Write( - transform=write.DocumentTransform( - document=document_path, field_transforms=field_transforms - ) - ) - if exists is not None: - transform_pb._pb.current_document.CopyFrom( - common.Precondition(exists=exists)._pb - ) - - return transform_pb - - -def pbs_for_create(document_path, document_data): - """Make ``Write`` protobufs for ``create()`` methods. - - Args: - document_path (str): A fully-qualified document path. - document_data (dict): Property names and values to use for - creating a document. - - Returns: - List[google.cloud.firestore_v1beta1.types.Write]: One or two - ``Write`` protobuf instances for ``create()``. - """ - extractor = DocumentExtractor(document_data) - - if extractor.deleted_fields: - raise ValueError("Cannot apply DELETE_FIELD in a create request.") - - write_pbs = [] - - # Conformance tests require skipping the 'update_pb' if the document - # contains only transforms. - if extractor.empty_document or extractor.set_fields: - write_pbs.append(extractor.get_update_pb(document_path, exists=False)) - - if extractor.has_transforms: - exists = None if write_pbs else False - transform_pb = extractor.get_transform_pb(document_path, exists) - write_pbs.append(transform_pb) - - return write_pbs - - -def pbs_for_set_no_merge(document_path, document_data): - """Make ``Write`` protobufs for ``set()`` methods. - - Args: - document_path (str): A fully-qualified document path. - document_data (dict): Property names and values to use for - replacing a document. - - Returns: - List[google.cloud.firestore_v1beta1.types.Write]: One - or two ``Write`` protobuf instances for ``set()``. - """ - extractor = DocumentExtractor(document_data) - - if extractor.deleted_fields: - raise ValueError( - "Cannot apply DELETE_FIELD in a set request without " - "specifying 'merge=True' or 'merge=[field_paths]'." - ) - - # Conformance tests require send the 'update_pb' even if the document - # contains only transforms. - write_pbs = [extractor.get_update_pb(document_path)] - - if extractor.has_transforms: - transform_pb = extractor.get_transform_pb(document_path) - write_pbs.append(transform_pb) - - return write_pbs - - -class DocumentExtractorForMerge(DocumentExtractor): - """ Break document data up into actual data and transforms. 
- """ - - def __init__(self, document_data): - super(DocumentExtractorForMerge, self).__init__(document_data) - self.data_merge = [] - self.transform_merge = [] - self.merge = [] - - @property - def has_updates(self): - # for whatever reason, the conformance tests want to see the parent - # of nested transform paths in the update mask - # (see set-st-merge-nonleaf-alone.textproto) - update_paths = set(self.data_merge) - - for transform_path in self.transform_paths: - if len(transform_path.parts) > 1: - parent_fp = FieldPath(*transform_path.parts[:-1]) - update_paths.add(parent_fp) - - return bool(update_paths) - - def _apply_merge_all(self): - self.data_merge = sorted(self.field_paths + self.deleted_fields) - # TODO: other transforms - self.transform_merge = self.transform_paths - self.merge = sorted(self.data_merge + self.transform_paths) - - def _construct_merge_paths(self, merge): - for merge_field in merge: - if isinstance(merge_field, FieldPath): - yield merge_field - else: - yield FieldPath(*parse_field_path(merge_field)) - - def _normalize_merge_paths(self, merge): - merge_paths = sorted(self._construct_merge_paths(merge)) - - # Raise if any merge path is a parent of another. Leverage sorting - # to avoid quadratic behavior. - for index in range(len(merge_paths) - 1): - lhs, rhs = merge_paths[index], merge_paths[index + 1] - if lhs.eq_or_parent(rhs): - raise ValueError("Merge paths overlap: {}, {}".format(lhs, rhs)) - - for merge_path in merge_paths: - if merge_path in self.deleted_fields: - continue - try: - get_field_value(self.document_data, merge_path) - except KeyError: - raise ValueError("Invalid merge path: {}".format(merge_path)) - - return merge_paths - - def _apply_merge_paths(self, merge): - - if self.empty_document: - raise ValueError("Cannot merge specific fields with empty document.") - - merge_paths = self._normalize_merge_paths(merge) - - del self.data_merge[:] - del self.transform_merge[:] - self.merge = merge_paths - - for merge_path in merge_paths: - - if merge_path in self.transform_paths: - self.transform_merge.append(merge_path) - - for field_path in self.field_paths: - if merge_path.eq_or_parent(field_path): - self.data_merge.append(field_path) - - # Clear out data for fields not merged. - merged_set_fields = {} - for field_path in self.data_merge: - value = get_field_value(self.document_data, field_path) - set_field_value(merged_set_fields, field_path, value) - self.set_fields = merged_set_fields - - unmerged_deleted_fields = [ - field_path - for field_path in self.deleted_fields - if field_path not in self.merge - ] - if unmerged_deleted_fields: - raise ValueError( - "Cannot delete unmerged fields: {}".format(unmerged_deleted_fields) - ) - self.data_merge = sorted(self.data_merge + self.deleted_fields) - - # Keep only transforms which are within merge. 
- merged_transform_paths = set() - for merge_path in self.merge: - tranform_merge_paths = [ - transform_path - for transform_path in self.transform_paths - if merge_path.eq_or_parent(transform_path) - ] - merged_transform_paths.update(tranform_merge_paths) - - self.server_timestamps = [ - path for path in self.server_timestamps if path in merged_transform_paths - ] - - self.array_removes = { - path: values - for path, values in self.array_removes.items() - if path in merged_transform_paths - } - - self.array_unions = { - path: values - for path, values in self.array_unions.items() - if path in merged_transform_paths - } - - def apply_merge(self, merge): - if merge is True: # merge all fields - self._apply_merge_all() - else: - self._apply_merge_paths(merge) - - def _get_update_mask(self, allow_empty_mask=False): - # Mask uses dotted / quoted paths. - mask_paths = [ - field_path.to_api_repr() - for field_path in self.merge - if field_path not in self.transform_merge - ] - - if mask_paths or allow_empty_mask: - return common.DocumentMask(field_paths=mask_paths) - - -def pbs_for_set_with_merge(document_path, document_data, merge): - """Make ``Write`` protobufs for ``set()`` methods. - - Args: - document_path (str): A fully-qualified document path. - document_data (dict): Property names and values to use for - replacing a document. - merge (Optional[bool] or Optional[List]): - If True, merge all fields; else, merge only the named fields. - - Returns: - List[google.cloud.firestore_v1beta1.types.Write]: One - or two ``Write`` protobuf instances for ``set()``. - """ - extractor = DocumentExtractorForMerge(document_data) - extractor.apply_merge(merge) - - merge_empty = not document_data - - write_pbs = [] - - if extractor.has_updates or merge_empty: - write_pbs.append( - extractor.get_update_pb(document_path, allow_empty_mask=merge_empty) - ) - - if extractor.transform_paths: - transform_pb = extractor.get_transform_pb(document_path) - write_pbs.append(transform_pb) - - return write_pbs - - -class DocumentExtractorForUpdate(DocumentExtractor): - """ Break document data up into actual data and transforms. - """ - - def __init__(self, document_data): - super(DocumentExtractorForUpdate, self).__init__(document_data) - self.top_level_paths = sorted( - [FieldPath.from_string(key) for key in document_data] - ) - tops = set(self.top_level_paths) - for top_level_path in self.top_level_paths: - for ancestor in top_level_path.lineage(): - if ancestor in tops: - raise ValueError( - "Conflicting field path: {}, {}".format( - top_level_path, ancestor - ) - ) - - for field_path in self.deleted_fields: - if field_path not in tops: - raise ValueError( - "Cannot update with nest delete: {}".format(field_path) - ) - - def _get_document_iterator(self, prefix_path): - return extract_fields(self.document_data, prefix_path, expand_dots=True) - - def _get_update_mask(self, allow_empty_mask=False): - mask_paths = [] - for field_path in self.top_level_paths: - if field_path not in self.transform_paths: - mask_paths.append(field_path.to_api_repr()) - - return common.DocumentMask(field_paths=mask_paths) - - -def pbs_for_update(document_path, field_updates, option): - """Make ``Write`` protobufs for ``update()`` methods. - - Args: - document_path (str): A fully-qualified document path. - field_updates (dict): Field names or paths to update and values - to update with. 
- option (optional[~.firestore_v1beta1.client.WriteOption]): A - write option to make assertions / preconditions on the server - state of the document before applying changes. - - Returns: - List[google.cloud.firestore_v1beta1.types.Write]: One - or two ``Write`` protobuf instances for ``update()``. - """ - extractor = DocumentExtractorForUpdate(field_updates) - - if extractor.empty_document: - raise ValueError("Cannot update with an empty document.") - - if option is None: # Default is to use ``exists=True``. - option = ExistsOption(exists=True) - - write_pbs = [] - - if extractor.field_paths or extractor.deleted_fields: - update_pb = extractor.get_update_pb(document_path) - option.modify_write(update_pb) - write_pbs.append(update_pb) - - if extractor.has_transforms: - transform_pb = extractor.get_transform_pb(document_path) - if not write_pbs: - # NOTE: set the write option on the ``transform_pb`` only if there - # is no ``update_pb`` - option.modify_write(transform_pb) - write_pbs.append(transform_pb) - - return write_pbs - - -def pb_for_delete(document_path, option): - """Make a ``Write`` protobuf for ``delete()`` methods. - - Args: - document_path (str): A fully-qualified document path. - option (optional[~.firestore_v1beta1.client.WriteOption]): A - write option to make assertions / preconditions on the server - state of the document before applying changes. - - Returns: - google.cloud.firestore_v1beta1.types.Write: A - ``Write`` protobuf instance for the ``delete()``. - """ - write_pb = write.Write(delete=document_path) - if option is not None: - option.modify_write(write_pb) - - return write_pb - - -class ReadAfterWriteError(Exception): - """Raised when a read is attempted after a write. - - Raised by "read" methods that use transactions. - """ - - -def get_transaction_id(transaction, read_operation=True): - """Get the transaction ID from a ``Transaction`` object. - - Args: - transaction (Optional[~.firestore_v1beta1.transaction.\ - Transaction]): An existing transaction that this query will - run in. - read_operation (Optional[bool]): Indicates if the transaction ID - will be used in a read operation. Defaults to :data:`True`. - - Returns: - Optional[bytes]: The ID of the transaction, or :data:`None` if the - ``transaction`` is :data:`None`. - - Raises: - ValueError: If the ``transaction`` is not in progress (only if - ``transaction`` is not :data:`None`). - ReadAfterWriteError: If the ``transaction`` has writes stored on - it and ``read_operation`` is :data:`True`. - """ - if transaction is None: - return None - else: - if not transaction.in_progress: - raise ValueError(INACTIVE_TXN) - if read_operation and len(transaction._write_pbs) > 0: - raise ReadAfterWriteError(READ_AFTER_WRITE_ERROR) - return transaction.id - - -def metadata_with_prefix(prefix, **kw): - """Create RPC metadata containing a prefix. - - Args: - prefix (str): appropriate resource path. - - Returns: - List[Tuple[str, str]]: RPC metadata with supplied prefix - """ - return [("google-cloud-resource-prefix", prefix)] - - -class WriteOption(object): - """Option used to assert a condition on a write operation.""" - - def modify_write(self, write, no_create_msg=None): - """Modify a ``Write`` protobuf based on the state of this write option. - - This is a virtual method intended to be implemented by subclasses. - - Args: - write (google.cloud.firestore_v1beta1.types.Write): A - ``Write`` protobuf instance to be modified with a precondition - determined by the state of this option. 
- no_create_msg (Optional[str]): A message to use to indicate that - a create operation is not allowed. - - Raises: - NotImplementedError: Always, this method is virtual. - """ - raise NotImplementedError - - -class LastUpdateOption(WriteOption): - """Option used to assert a "last update" condition on a write operation. - - This will typically be created by - :meth:`~google.cloud.firestore_v1beta1.client.Client.write_option`. - - Args: - last_update_time (google.protobuf.timestamp_pb2.Timestamp): A - timestamp. When set, the target document must exist and have - been last updated at that time. Protobuf ``update_time`` timestamps - are typically returned from methods that perform write operations - as part of a "write result" protobuf or directly. - """ - - def __init__(self, last_update_time): - self._last_update_time = last_update_time - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return NotImplemented - return self._last_update_time == other._last_update_time - - def modify_write(self, write, **unused_kwargs): - """Modify a ``Write`` protobuf based on the state of this write option. - - The ``last_update_time`` is added to ``write_pb`` as an "update time" - precondition. When set, the target document must exist and have been - last updated at that time. - - Args: - write (google.cloud.firestore_v1beta1.types.Write): A - ``Write`` protobuf instance to be modified with a precondition - determined by the state of this option. - unused_kwargs (Dict[str, Any]): Keyword arguments accepted by - other subclasses that are unused here. - """ - current_doc = types.Precondition(update_time=self._last_update_time) - write._pb.current_document.CopyFrom(current_doc._pb) - - -class ExistsOption(WriteOption): - """Option used to assert existence on a write operation. - - This will typically be created by - :meth:`~google.cloud.firestore_v1beta1.client.Client.write_option`. - - Args: - exists (bool): Indicates if the document being modified - should already exist. - """ - - def __init__(self, exists): - self._exists = exists - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return NotImplemented - return self._exists == other._exists - - def modify_write(self, write, **unused_kwargs): - """Modify a ``Write`` protobuf based on the state of this write option. - - If: - - * ``exists=True``, adds a precondition that requires existence - * ``exists=False``, adds a precondition that requires non-existence - - Args: - write (google.cloud.firestore_v1beta1.types.Write): A - ``Write`` protobuf instance to be modified with a precondition - determined by the state of this option. - unused_kwargs (Dict[str, Any]): Keyword arguments accepted by - other subclasses that are unused here. - """ - current_doc = types.Precondition(exists=self._exists) - write._pb.current_document.CopyFrom(current_doc._pb) diff --git a/google/cloud/firestore_v1beta1/batch.py b/google/cloud/firestore_v1beta1/batch.py deleted file mode 100644 index 33e347f7e..000000000 --- a/google/cloud/firestore_v1beta1/batch.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helpers for batch requests to the Google Cloud Firestore API.""" - - -from google.cloud.firestore_v1beta1 import _helpers - - -class WriteBatch(object): - """Accumulate write operations to be sent in a batch. - - This has the same set of methods for write operations that - :class:`~google.cloud.firestore_v1beta1.document.DocumentReference` - does, e.g. - :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.create`. - - Args: - client (~.firestore_v1beta1.client.Client): The client that - created this batch. - """ - - def __init__(self, client): - self._client = client - self._write_pbs = [] - self.write_results = None - self.commit_time = None - - def _add_write_pbs(self, write_pbs): - """Add `Write`` protobufs to this transaction. - - This method intended to be over-ridden by subclasses. - - Args: - write_pbs (List[google.cloud.proto.firestore.v1beta1.\ - write.Write]): A list of write protobufs to be added. - """ - self._write_pbs.extend(write_pbs) - - def create(self, reference, document_data): - """Add a "change" to this batch to create a document. - - If the document given by ``reference`` already exists, then this - batch will fail when :meth:`commit`-ed. - - Args: - reference (~.firestore_v1beta1.document.DocumentReference): A - document reference to be created in this batch. - document_data (dict): Property names and values to use for - creating a document. - """ - write_pbs = _helpers.pbs_for_create(reference._document_path, document_data) - self._add_write_pbs(write_pbs) - - def set(self, reference, document_data, merge=False): - """Add a "change" to replace a document. - - See - :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.set` - for more information on how ``option`` determines how the change is - applied. - - Args: - reference (~.firestore_v1beta1.document.DocumentReference): - A document reference that will have values set in this batch. - document_data (dict): - Property names and values to use for replacing a document. - merge (Optional[bool] or Optional[List]): - If True, apply merging instead of overwriting the state - of the document. - """ - if merge is not False: - write_pbs = _helpers.pbs_for_set_with_merge( - reference._document_path, document_data, merge - ) - else: - write_pbs = _helpers.pbs_for_set_no_merge( - reference._document_path, document_data - ) - - self._add_write_pbs(write_pbs) - - def update(self, reference, field_updates, option=None): - """Add a "change" to update a document. - - See - :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.update` - for more information on ``field_updates`` and ``option``. - - Args: - reference (~.firestore_v1beta1.document.DocumentReference): A - document reference that will be deleted in this batch. - field_updates (dict): Field names or paths to update and values - to update with. - option (Optional[~.firestore_v1beta1.client.WriteOption]): A - write option to make assertions / preconditions on the server - state of the document before applying changes. 
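``WriteBatch.set`` above chooses between a full overwrite and a merge purely on the ``merge`` flag; a usage sketch against the surface being removed here (placeholder project data, assumes default credentials; the surviving ``firestore_v1`` batch exposes the same calls):

.. code-block:: python

    from google.cloud import firestore_v1beta1

    client = firestore_v1beta1.Client()
    doc_ref = client.collection("users").document("alovelace")

    batch = client.batch()
    # Plain set: the committed document contains exactly these fields.
    batch.set(doc_ref, {"first": "Ada", "last": "Lovelace"})
    # merge=True: only 'born' is written; existing fields are left untouched.
    batch.set(doc_ref, {"born": 1815}, merge=True)
    write_results = batch.commit()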
- """ - if option.__class__.__name__ == "ExistsOption": - raise ValueError("you must not pass an explicit write option to " "update.") - write_pbs = _helpers.pbs_for_update( - reference._document_path, field_updates, option - ) - self._add_write_pbs(write_pbs) - - def delete(self, reference, option=None): - """Add a "change" to delete a document. - - See - :meth:`~google.cloud.firestore_v1beta1.document.DocumentReference.delete` - for more information on how ``option`` determines how the change is - applied. - - Args: - reference (~.firestore_v1beta1.document.DocumentReference): A - document reference that will be deleted in this batch. - option (Optional[~.firestore_v1beta1.client.WriteOption]): A - write option to make assertions / preconditions on the server - state of the document before applying changes. - """ - write_pb = _helpers.pb_for_delete(reference._document_path, option) - self._add_write_pbs([write_pb]) - - def commit(self): - """Commit the changes accumulated in this batch. - - Returns: - List[google.cloud.proto.firestore.v1beta1.\ - write.WriteResult, ...]: The write results corresponding - to the changes committed, returned in the same order as the - changes were applied to this batch. A write result contains an - ``update_time`` field. - """ - commit_response = self._client._firestore_api.commit( - request={ - "database": self._client._database_string, - "writes": self._write_pbs, - "transaction": None, - }, - metadata=self._client._rpc_metadata, - ) - - self._write_pbs = [] - self.write_results = results = list(commit_response.write_results) - self.commit_time = commit_response.commit_time - return results - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - if exc_type is None: - self.commit() diff --git a/google/cloud/firestore_v1beta1/client.py b/google/cloud/firestore_v1beta1/client.py deleted file mode 100644 index 83eb952d5..000000000 --- a/google/cloud/firestore_v1beta1/client.py +++ /dev/null @@ -1,546 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Client for interacting with the Google Cloud Firestore API. - -This is the base from which all interactions with the API occur. 
- -In the hierarchy of API concepts - -* a :class:`~google.cloud.firestore_v1beta1.client.Client` owns a - :class:`~google.cloud.firestore_v1beta1.collection.CollectionReference` -* a :class:`~google.cloud.firestore_v1beta1.client.Client` owns a - :class:`~google.cloud.firestore_v1beta1.document.DocumentReference` -""" -import warnings -import google.api_core.path_template -from google.cloud.client import ClientWithProject - -from google.cloud.firestore_v1beta1 import _helpers -from google.cloud.firestore_v1beta1 import types -from google.cloud.firestore_v1beta1.batch import WriteBatch -from google.cloud.firestore_v1beta1.collection import CollectionReference -from google.cloud.firestore_v1beta1.document import DocumentReference -from google.cloud.firestore_v1beta1.document import DocumentSnapshot -from google.cloud.firestore_v1beta1.field_path import render_field_path -from google.cloud.firestore_v1beta1.services.firestore import client as firestore_client -from google.cloud.firestore_v1beta1.services.firestore.transports import ( - grpc as firestore_grpc_transport, -) -from google.cloud.firestore_v1beta1.transaction import Transaction - - -DEFAULT_DATABASE = "(default)" -"""str: The default database used in a :class:`~google.cloud.firestore.client.Client`.""" -_BAD_OPTION_ERR = ( - "Exactly one of ``last_update_time`` or ``exists`` " "must be provided." -) -_BAD_DOC_TEMPLATE = ( - "Document {!r} appeared in response but was not present among references" -) -_ACTIVE_TXN = "There is already an active transaction." -_INACTIVE_TXN = "There is no active transaction." -_V1BETA1_DEPRECATED_MESSAGE = ( - "The 'v1beta1' API endpoint is deprecated. " - "The client/library which supports it will be removed in a future release." -) - - -class Client(ClientWithProject): - """Client for interacting with Google Cloud Firestore API. - - .. note:: - - Since the Cloud Firestore API requires the gRPC transport, no - ``_http`` argument is accepted by this class. - - Args: - project (Optional[str]): The project which the client acts on behalf - of. If not passed, falls back to the default inferred - from the environment. - credentials (Optional[~google.auth.credentials.Credentials]): The - OAuth2 Credentials to use for this client. If not passed, falls - back to the default inferred from the environment. - database (Optional[str]): The database name that the client targets. - For now, :attr:`DEFAULT_DATABASE` (the default value) is the - only valid database. - """ - - SCOPE = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - """The scopes required for authenticating with the Firestore service.""" - - _firestore_api_internal = None - _database_string_internal = None - _rpc_metadata_internal = None - - def __init__(self, project=None, credentials=None, database=DEFAULT_DATABASE): - warnings.warn(_V1BETA1_DEPRECATED_MESSAGE, DeprecationWarning, stacklevel=2) - # NOTE: This API has no use for the _http argument, but sending it - # will have no impact since the _http() @property only lazily - # creates a working HTTP object. - super(Client, self).__init__( - project=project, credentials=credentials, _http=None - ) - self._database = database - - @property - def _firestore_api(self): - """Lazy-loading getter GAPIC Firestore API. - - Returns: - ~.gapic.firestore.v1beta1.firestore_client.FirestoreClient: The - GAPIC client with the credentials of the current client. - """ - if self._firestore_api_internal is None: - # Use a custom channel. 
- # We need this in order to set appropriate keepalive options. - channel = firestore_grpc_transport.FirestoreGrpcTransport.create_channel( - self._target, - credentials=self._credentials, - options={"grpc.keepalive_time_ms": 30000}.items(), - ) - - self._transport = firestore_grpc_transport.FirestoreGrpcTransport( - host=self._target, channel=channel - ) - - self._firestore_api_internal = firestore_client.FirestoreClient( - transport=self._transport - ) - - return self._firestore_api_internal - - @property - def _target(self): - """Return the target (where the API is). - - Returns: - str: The location of the API. - """ - return firestore_client.FirestoreClient.DEFAULT_ENDPOINT - - @property - def _database_string(self): - """The database string corresponding to this client's project. - - This value is lazy-loaded and cached. - - Will be of the form - - ``projects/{project_id}/databases/{database_id}`` - - but ``database_id == '(default)'`` for the time being. - - Returns: - str: The fully-qualified database string for the current - project. (The default database is also in this string.) - """ - if self._database_string_internal is None: - db_str = google.api_core.path_template.expand( - "projects/{project}/databases/{database}", - project=self.project, - database=self._database, - ) - self._database_string_internal = db_str - - return self._database_string_internal - - @property - def _rpc_metadata(self): - """The RPC metadata for this client's associated database. - - Returns: - Sequence[Tuple(str, str)]: RPC metadata with resource prefix - for the database associated with this client. - """ - if self._rpc_metadata_internal is None: - self._rpc_metadata_internal = _helpers.metadata_with_prefix( - self._database_string - ) - - return self._rpc_metadata_internal - - def collection(self, *collection_path): - """Get a reference to a collection. - - For a top-level collection: - - .. code-block:: python - - >>> client.collection('top') - - For a sub-collection: - - .. code-block:: python - - >>> client.collection('mydocs/doc/subcol') - >>> # is the same as - >>> client.collection('mydocs', 'doc', 'subcol') - - Sub-collections can be nested deeper in a similar fashion. - - Args: - collection_path (Tuple[str, ...]): Can either be - - * A single ``/``-delimited path to a collection - * A tuple of collection path segments - - Returns: - ~.firestore_v1beta1.collection.CollectionReference: A reference - to a collection in the Firestore database. - """ - if len(collection_path) == 1: - path = collection_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER) - else: - path = collection_path - - return CollectionReference(*path, client=self) - - def document(self, *document_path): - """Get a reference to a document in a collection. - - For a top-level document: - - .. code-block:: python - - >>> client.document('collek/shun') - >>> # is the same as - >>> client.document('collek', 'shun') - - For a document in a sub-collection: - - .. code-block:: python - - >>> client.document('mydocs/doc/subcol/child') - >>> # is the same as - >>> client.document('mydocs', 'doc', 'subcol', 'child') - - Documents in sub-collections can be nested deeper in a similar fashion. - - Args: - document_path (Tuple[str, ...]): Can either be - - * A single ``/``-delimited path to a document - * A tuple of document path segments - - Returns: - ~.firestore_v1beta1.document.DocumentReference: A reference - to a document in a collection. 
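The database string above is an ordinary path-template expansion; the same call can be reproduced outside the client (the project and database values here are placeholders):

.. code-block:: python

    from google.api_core import path_template

    db_str = path_template.expand(
        "projects/{project}/databases/{database}",
        project="my-project",
        database="(default)",
    )
    print(db_str)  # projects/my-project/databases/(default)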
- """ - if len(document_path) == 1: - path = document_path[0].split(_helpers.DOCUMENT_PATH_DELIMITER) - else: - path = document_path - - return DocumentReference(*path, client=self) - - @staticmethod - def field_path(*field_names): - """Create a **field path** from a list of nested field names. - - A **field path** is a ``.``-delimited concatenation of the field - names. It is used to represent a nested field. For example, - in the data - - .. code-block:: python - - data = { - 'aa': { - 'bb': { - 'cc': 10, - }, - }, - } - - the field path ``'aa.bb.cc'`` represents the data stored in - ``data['aa']['bb']['cc']``. - - Args: - field_names (Tuple[str, ...]): The list of field names. - - Returns: - str: The ``.``-delimited field path. - """ - return render_field_path(field_names) - - @staticmethod - def write_option(**kwargs): - """Create a write option for write operations. - - Write operations include :meth:`~google.cloud.DocumentReference.set`, - :meth:`~google.cloud.DocumentReference.update` and - :meth:`~google.cloud.DocumentReference.delete`. - - One of the following keyword arguments must be provided: - - * ``last_update_time`` (:class:`google.protobuf.timestamp_pb2.\ - Timestamp`): A timestamp. When set, the target document must - exist and have been last updated at that time. Protobuf - ``update_time`` timestamps are typically returned from methods - that perform write operations as part of a "write result" - protobuf or directly. - * ``exists`` (:class:`bool`): Indicates if the document being modified - should already exist. - - Providing no argument would make the option have no effect (so - it is not allowed). Providing multiple would be an apparent - contradiction, since ``last_update_time`` assumes that the - document **was** updated (it can't have been updated if it - doesn't exist) and ``exists`` indicate that it is unknown if the - document exists or not. - - Args: - kwargs (Dict[str, Any]): The keyword arguments described above. - - Raises: - TypeError: If anything other than exactly one argument is - provided by the caller. - """ - if len(kwargs) != 1: - raise TypeError(_BAD_OPTION_ERR) - - name, value = kwargs.popitem() - if name == "last_update_time": - return _helpers.LastUpdateOption(value) - elif name == "exists": - return _helpers.ExistsOption(value) - else: - extra = "{!r} was provided".format(name) - raise TypeError(_BAD_OPTION_ERR, extra) - - def get_all(self, references, field_paths=None, transaction=None): - """Retrieve a batch of documents. - - .. note:: - - Documents returned by this method are not guaranteed to be - returned in the same order that they are given in ``references``. - - .. note:: - - If multiple ``references`` refer to the same document, the server - will only return one result. - - See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` - for more information on **field paths**. - - If a ``transaction`` is used and it already has write operations - added, this method cannot be used (i.e. read-after-write is not - allowed). - - Args: - references (List[.DocumentReference, ...]): Iterable of document - references to be retrieved. - field_paths (Optional[Iterable[str, ...]]): An iterable of field - paths (``.``-delimited list of field names) to use as a - projection of document fields in the returned results. If - no value is provided, all fields will be returned. - transaction (Optional[~.firestore_v1beta1.transaction.\ - Transaction]): An existing transaction that these - ``references`` will be retrieved in. 
- - Yields: - .DocumentSnapshot: The next document snapshot that fulfills the - query, or :data:`None` if the document does not exist. - """ - document_paths, reference_map = _reference_info(references) - mask = _get_doc_mask(field_paths) - response_iterator = self._firestore_api.batch_get_documents( - request={ - "database": self._database_string, - "documents": document_paths, - "mask": mask, - "transaction": _helpers.get_transaction_id(transaction), - }, - metadata=self._rpc_metadata, - ) - - for get_doc_response in response_iterator: - yield _parse_batch_get(get_doc_response, reference_map, self) - - def collections(self): - """List top-level collections of the client's database. - - Returns: - Sequence[~.firestore_v1beta1.collection.CollectionReference]: - iterator of subcollections of the current document. - """ - iterator = self._firestore_api.list_collection_ids( - request={"parent": self._database_string}, metadata=self._rpc_metadata - ) - iterator.client = self - iterator.item_to_value = _item_to_collection_ref - return iterator - - def batch(self): - """Get a batch instance from this client. - - Returns: - ~.firestore_v1beta1.batch.WriteBatch: A "write" batch to be - used for accumulating document changes and sending the changes - all at once. - """ - return WriteBatch(self) - - def transaction(self, **kwargs): - """Get a transaction that uses this client. - - See :class:`~google.cloud.firestore_v1beta1.transaction.Transaction` - for more information on transactions and the constructor arguments. - - Args: - kwargs (Dict[str, Any]): The keyword arguments (other than - ``client``) to pass along to the - :class:`~google.cloud.firestore_v1beta1.transaction.Transaction` - constructor. - - Returns: - ~.firestore_v1beta1.transaction.Transaction: A transaction - attached to this client. - """ - return Transaction(self, **kwargs) - - -def _reference_info(references): - """Get information about document references. - - Helper for :meth:`~google.cloud.firestore_v1beta1.client.Client.get_all`. - - Args: - references (List[.DocumentReference, ...]): Iterable of document - references. - - Returns: - Tuple[List[str, ...], Dict[str, .DocumentReference]]: A two-tuple of - - * fully-qualified documents paths for each reference in ``references`` - * a mapping from the paths to the original reference. (If multiple - ``references`` contains multiple references to the same document, - that key will be overwritten in the result.) - """ - document_paths = [] - reference_map = {} - for reference in references: - doc_path = reference._document_path - document_paths.append(doc_path) - reference_map[doc_path] = reference - - return document_paths, reference_map - - -def _get_reference(document_path, reference_map): - """Get a document reference from a dictionary. - - This just wraps a simple dictionary look-up with a helpful error that is - specific to :meth:`~google.cloud.firestore.client.Client.get_all`, the - **public** caller of this function. - - Args: - document_path (str): A fully-qualified document path. - reference_map (Dict[str, .DocumentReference]): A mapping (produced - by :func:`_reference_info`) of fully-qualified document paths to - document references. - - Returns: - .DocumentReference: The matching reference. - - Raises: - ValueError: If ``document_path`` has not been encountered. 
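``get_all`` above streams results in server order rather than request order, and a reference to a missing document yields a snapshot whose ``exists`` is false (see ``_parse_batch_get`` below) instead of being skipped; a usage sketch against the removed surface (placeholder paths, assumes default credentials):

.. code-block:: python

    from google.cloud import firestore_v1beta1

    client = firestore_v1beta1.Client()
    refs = [client.document("users/alovelace"), client.document("users/gking")]

    # Only 'first' and 'last' are decoded, thanks to the field-path mask.
    for snapshot in client.get_all(refs, field_paths=["first", "last"]):
        if snapshot.exists:
            print(snapshot.id, snapshot.to_dict())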
- """ - try: - return reference_map[document_path] - except KeyError: - msg = _BAD_DOC_TEMPLATE.format(document_path) - raise ValueError(msg) - - -def _parse_batch_get(get_doc_response, reference_map, client): - """Parse a `BatchGetDocumentsResponse` protobuf. - - Args: - get_doc_response (~google.cloud.proto.firestore.v1beta1.\ - firestore.BatchGetDocumentsResponse): A single response (from - a stream) containing the "get" response for a document. - reference_map (Dict[str, .DocumentReference]): A mapping (produced - by :func:`_reference_info`) of fully-qualified document paths to - document references. - client (~.firestore_v1beta1.client.Client): A client that has - a document factory. - - Returns: - [.DocumentSnapshot]: The retrieved snapshot. - - Raises: - ValueError: If the response has a ``result`` field (a oneof) other - than ``found`` or ``missing``. - """ - result_type = get_doc_response._pb.WhichOneof("result") - if result_type == "found": - reference = _get_reference(get_doc_response.found.name, reference_map) - data = _helpers.decode_dict(get_doc_response.found.fields, client) - snapshot = DocumentSnapshot( - reference, - data, - exists=True, - read_time=get_doc_response.read_time, - create_time=get_doc_response.found.create_time, - update_time=get_doc_response.found.update_time, - ) - elif result_type == "missing": - snapshot = DocumentSnapshot( - None, - None, - exists=False, - read_time=get_doc_response.read_time, - create_time=None, - update_time=None, - ) - else: - raise ValueError( - "`BatchGetDocumentsResponse.result` (a oneof) had a field other " - "than `found` or `missing` set, or was unset" - ) - return snapshot - - -def _get_doc_mask(field_paths): - """Get a document mask if field paths are provided. - - Args: - field_paths (Optional[Iterable[str, ...]]): An iterable of field - paths (``.``-delimited list of field names) to use as a - projection of document fields in the returned results. - - Returns: - Optional[google.cloud.firestore_v1beta1.types.DocumentMask]: A mask - to project documents to a restricted set of field paths. - """ - if field_paths is None: - return None - else: - return types.DocumentMask(field_paths=field_paths) - - -def _item_to_collection_ref(iterator, item): - """Convert collection ID to collection ref. - - Args: - iterator (google.api_core.page_iterator.GRPCIterator): - iterator response - item (str): ID of the collection - """ - return iterator.client.collection(item) diff --git a/google/cloud/firestore_v1beta1/collection.py b/google/cloud/firestore_v1beta1/collection.py deleted file mode 100644 index db6dffeb8..000000000 --- a/google/cloud/firestore_v1beta1/collection.py +++ /dev/null @@ -1,482 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Classes for representing collections for the Google Cloud Firestore API.""" -import random -import warnings - -import six - -from google.cloud.firestore_v1beta1 import _helpers -from google.cloud.firestore_v1beta1 import query as query_mod -from google.cloud.firestore_v1beta1.types import document as document_pb2 -from google.cloud.firestore_v1beta1.watch import Watch -from google.cloud.firestore_v1beta1 import document - -_AUTO_ID_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - - -class CollectionReference(object): - """A reference to a collection in a Firestore database. - - The collection may already exist or this class can facilitate creation - of documents within the collection. - - Args: - path (Tuple[str, ...]): The components in the collection path. - This is a series of strings representing each collection and - sub-collection ID, as well as the document IDs for any documents - that contain a sub-collection. - kwargs (dict): The keyword arguments for the constructor. The only - supported keyword is ``client`` and it must be a - :class:`~google.cloud.firestore_v1beta1.client.Client` if - provided. It represents the client that created this collection - reference. - - Raises: - ValueError: if - - * the ``path`` is empty - * there are an even number of elements - * a collection ID in ``path`` is not a string - * a document ID in ``path`` is not a string - TypeError: If a keyword other than ``client`` is used. - """ - - def __init__(self, *path, **kwargs): - _helpers.verify_path(path, is_collection=True) - self._path = path - self._client = kwargs.pop("client", None) - if kwargs: - raise TypeError( - "Received unexpected arguments", kwargs, "Only `client` is supported" - ) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return NotImplemented - return self._path == other._path and self._client == other._client - - @property - def id(self): - """The collection identifier. - - Returns: - str: The last component of the path. - """ - return self._path[-1] - - @property - def parent(self): - """Document that owns the current collection. - - Returns: - Optional[~.firestore_v1beta1.document.DocumentReference]: The - parent document, if the current collection is not a - top-level collection. - """ - if len(self._path) == 1: - return None - else: - parent_path = self._path[:-1] - return self._client.document(*parent_path) - - def document(self, document_id=None): - """Create a sub-document underneath the current collection. - - Args: - document_id (Optional[str]): The document identifier - within the current collection. If not provided, will default - to a random 20 character string composed of digits, - uppercase and lowercase and letters. - - Returns: - ~.firestore_v1beta1.document.DocumentReference: The child - document. - """ - if document_id is None: - document_id = _auto_id() - - child_path = self._path + (document_id,) - return self._client.document(*child_path) - - def _parent_info(self): - """Get fully-qualified parent path and prefix for this collection. - - Returns: - Tuple[str, str]: Pair of - - * the fully-qualified (with database and project) path to the - parent of this collection (will either be the database path - or a document path). - * the prefix to a document in this collection. 
- """ - parent_doc = self.parent - if parent_doc is None: - parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join( - (self._client._database_string, "documents") - ) - else: - parent_path = parent_doc._document_path - - expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, self.id)) - return parent_path, expected_prefix - - def add(self, document_data, document_id=None): - """Create a document in the Firestore database with the provided data. - - Args: - document_data (dict): Property names and values to use for - creating the document. - document_id (Optional[str]): The document identifier within the - current collection. If not provided, an ID will be - automatically assigned by the server (the assigned ID will be - a random 20 character string composed of digits, - uppercase and lowercase letters). - - Returns: - Tuple[google.protobuf.timestamp_pb2.Timestamp, \ - ~.firestore_v1beta1.document.DocumentReference]: Pair of - - * The ``update_time`` when the document was created (or - overwritten). - * A document reference for the created document. - - Raises: - ~google.cloud.exceptions.Conflict: If ``document_id`` is provided - and the document already exists. - """ - if document_id is None: - parent_path, expected_prefix = self._parent_info() - - document_pb = document_pb2.Document() - - created_document_pb = self._client._firestore_api.create_document( - request={ - "parent": parent_path, - "collection_id": self.id, - "document": None, - "document_id": document_pb, - "mask": None, - }, - metadata=self._client._rpc_metadata, - ) - - new_document_id = _helpers.get_doc_id(created_document_pb, expected_prefix) - document_ref = self.document(new_document_id) - set_result = document_ref.set(document_data) - return set_result.update_time, document_ref - else: - document_ref = self.document(document_id) - write_result = document_ref.create(document_data) - return write_result.update_time, document_ref - - def list_documents(self, page_size=None): - """List all subdocuments of the current collection. - - Args: - page_size (Optional[int]]): The maximum number of documents - in each page of results from this request. Non-positive values - are ignored. Defaults to a sensible value set by the API. - - Returns: - Sequence[~.firestore_v1beta1.collection.DocumentReference]: - iterator of subdocuments of the current collection. If the - collection does not exist at the time of `snapshot`, the - iterator will be empty - """ - parent, _ = self._parent_info() - - iterator = self._client._firestore_api.list_documents( - request={ - "parent": parent, - "collection_id": self.id, - "page_size": page_size, - "page_token": True, - }, - metadata=self._client._rpc_metadata, - ) - iterator.collection = self - iterator.item_to_value = _item_to_document_ref - return iterator - - def select(self, field_paths): - """Create a "select" query with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.select` for - more information on this method. - - Args: - field_paths (Iterable[str, ...]): An iterable of field paths - (``.``-delimited list of field names) to use as a projection - of document fields in the query results. - - Returns: - ~.firestore_v1beta1.query.Query: A "projected" query. - """ - query = query_mod.Query(self) - return query.select(field_paths) - - def where(self, field_path, op_string, value): - """Create a "where" query with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.where` for - more information on this method. 
- - Args: - field_path (str): A field path (``.``-delimited list of - field names) for the field to filter on. - op_string (str): A comparison operation in the form of a string. - Acceptable values are ``<``, ``<=``, ``==``, ``>=`` - and ``>``. - value (Any): The value to compare the field against in the filter. - If ``value`` is :data:`None` or a NaN, then ``==`` is the only - allowed operation. - - Returns: - ~.firestore_v1beta1.query.Query: A filtered query. - """ - query = query_mod.Query(self) - return query.where(field_path, op_string, value) - - def order_by(self, field_path, **kwargs): - """Create an "order by" query with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by` for - more information on this method. - - Args: - field_path (str): A field path (``.``-delimited list of - field names) on which to order the query results. - kwargs (Dict[str, Any]): The keyword arguments to pass along - to the query. The only supported keyword is ``direction``, see - :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by` - for more information. - - Returns: - ~.firestore_v1beta1.query.Query: An "order by" query. - """ - query = query_mod.Query(self) - return query.order_by(field_path, **kwargs) - - def limit(self, count): - """Create a limited query with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.limit` for - more information on this method. - - Args: - count (int): Maximum number of documents to return that match - the query. - - Returns: - ~.firestore_v1beta1.query.Query: A limited query. - """ - query = query_mod.Query(self) - return query.limit(count) - - def offset(self, num_to_skip): - """Skip to an offset in a query with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.offset` for - more information on this method. - - Args: - num_to_skip (int): The number of results to skip at the beginning - of query results. (Must be non-negative.) - - Returns: - ~.firestore_v1beta1.query.Query: An offset query. - """ - query = query_mod.Query(self) - return query.offset(num_to_skip) - - def start_at(self, document_fields): - """Start query at a cursor with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.start_at` for - more information on this method. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. - """ - query = query_mod.Query(self) - return query.start_at(document_fields) - - def start_after(self, document_fields): - """Start query after a cursor with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.start_after` for - more information on this method. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. 
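The cursor helpers accept either a ``DocumentSnapshot`` or a dict/list/tuple of field values; a simple paging sketch against the removed surface (placeholder data, assumes default credentials and a non-empty first page):

.. code-block:: python

    from google.cloud import firestore_v1beta1

    client = firestore_v1beta1.Client()
    users = client.collection("users")

    first_page = list(users.order_by("born").limit(25).stream())
    last_seen = first_page[-1]  # assumes at least one result came back

    # A snapshot cursor starts the next page just after the last result;
    # a plain dict such as {'born': 1815} positions the cursor the same way.
    next_page = users.order_by("born").start_after(last_seen).limit(25)
    for snapshot in next_page.stream():
        print(snapshot.id)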
- """ - query = query_mod.Query(self) - return query.start_after(document_fields) - - def end_before(self, document_fields): - """End query before a cursor with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.end_before` for - more information on this method. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. - """ - query = query_mod.Query(self) - return query.end_before(document_fields) - - def end_at(self, document_fields): - """End query at a cursor with this collection as parent. - - See - :meth:`~google.cloud.firestore_v1beta1.query.Query.end_at` for - more information on this method. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. - """ - query = query_mod.Query(self) - return query.end_at(document_fields) - - def get(self, transaction=None): - """Deprecated alias for :meth:`stream`.""" - warnings.warn( - "'Collection.get' is deprecated: please use 'Collection.stream' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.stream(transaction=transaction) - - def stream(self, transaction=None): - """Read the documents in this collection. - - This sends a ``RunQuery`` RPC and then returns an iterator which - consumes each document returned in the stream of ``RunQueryResponse`` - messages. - - .. note:: - - The underlying stream of responses will time out after - the ``max_rpc_timeout_millis`` value set in the GAPIC - client configuration for the ``RunQuery`` API. Snapshots - not consumed from the iterator before that point will be lost. - - If a ``transaction`` is used and it already has write operations - added, this method cannot be used (i.e. read-after-write is not - allowed). - - Args: - transaction (Optional[~.firestore_v1beta1.transaction.\ - Transaction]): An existing transaction that the query will - run in. - - Yields: - ~.firestore_v1beta1.document.DocumentSnapshot: The next - document that fulfills the query. - """ - query = query_mod.Query(self) - return query.stream(transaction=transaction) - - def on_snapshot(self, callback): - """Monitor the documents in this collection. - - This starts a watch on this collection using a background thread. The - provided callback is run on the snapshot of the documents. - - Args: - callback(~.firestore.collection.CollectionSnapshot): a callback - to run when a change occurs. 
- - Example: - from google.cloud import firestore_v1beta1 - - db = firestore_v1beta1.Client() - collection_ref = db.collection(u'users') - - def on_snapshot(collection_snapshot): - for doc in collection_snapshot.documents: - print(u'{} => {}'.format(doc.id, doc.to_dict())) - - # Watch this collection - collection_watch = collection_ref.on_snapshot(on_snapshot) - - # Terminate this watch - collection_watch.unsubscribe() - """ - return Watch.for_query( - query_mod.Query(self), - callback, - document.DocumentSnapshot, - document.DocumentReference, - ) - - -def _auto_id(): - """Generate a "random" automatically generated ID. - - Returns: - str: A 20 character string composed of digits, uppercase and - lowercase and letters. - """ - return "".join(random.choice(_AUTO_ID_CHARS) for _ in six.moves.xrange(20)) - - -def _item_to_document_ref(iterator, item): - """Convert Document resource to document ref. - - Args: - iterator (google.api_core.page_iterator.GRPCIterator): - iterator response - item (dict): document resource - """ - document_id = item.name.split(_helpers.DOCUMENT_PATH_DELIMITER)[-1] - return iterator.collection.document(document_id) diff --git a/google/cloud/firestore_v1beta1/document.py b/google/cloud/firestore_v1beta1/document.py deleted file mode 100644 index 876787536..000000000 --- a/google/cloud/firestore_v1beta1/document.py +++ /dev/null @@ -1,787 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Classes for representing documents for the Google Cloud Firestore API.""" - -import copy - -import six - -from google.api_core import exceptions -from google.cloud.firestore_v1beta1 import _helpers -from google.cloud.firestore_v1beta1 import field_path as field_path_module -from google.cloud.firestore_v1beta1.types import common -from google.cloud.firestore_v1beta1.watch import Watch - - -class DocumentReference(object): - """A reference to a document in a Firestore database. - - The document may already exist or can be created by this class. - - Args: - path (Tuple[str, ...]): The components in the document path. - This is a series of strings representing each collection and - sub-collection ID, as well as the document IDs for any documents - that contain a sub-collection (as well as the base document). - kwargs (dict): The keyword arguments for the constructor. The only - supported keyword is ``client`` and it must be a - :class:`~google.cloud.firestore_v1beta1.client.Client`. - It represents the client that created this document reference. - - Raises: - ValueError: if - - * the ``path`` is empty - * there are an even number of elements - * a collection ID in ``path`` is not a string - * a document ID in ``path`` is not a string - TypeError: If a keyword other than ``client`` is used. 
- """ - - _document_path_internal = None - - def __init__(self, *path, **kwargs): - _helpers.verify_path(path, is_collection=False) - self._path = path - self._client = kwargs.pop("client", None) - if kwargs: - raise TypeError( - "Received unexpected arguments", kwargs, "Only `client` is supported" - ) - - def __copy__(self): - """Shallow copy the instance. - - We leave the client "as-is" but tuple-unpack the path. - - Returns: - .DocumentReference: A copy of the current document. - """ - result = self.__class__(*self._path, client=self._client) - result._document_path_internal = self._document_path_internal - return result - - def __deepcopy__(self, unused_memo): - """Deep copy the instance. - - This isn't a true deep copy, wee leave the client "as-is" but - tuple-unpack the path. - - Returns: - .DocumentReference: A copy of the current document. - """ - return self.__copy__() - - def __eq__(self, other): - """Equality check against another instance. - - Args: - other (Any): A value to compare against. - - Returns: - Union[bool, NotImplementedType]: Indicating if the values are - equal. - """ - if isinstance(other, DocumentReference): - return self._client == other._client and self._path == other._path - else: - return NotImplemented - - def __hash__(self): - return hash(self._path) + hash(self._client) - - def __ne__(self, other): - """Inequality check against another instance. - - Args: - other (Any): A value to compare against. - - Returns: - Union[bool, NotImplementedType]: Indicating if the values are - not equal. - """ - if isinstance(other, DocumentReference): - return self._client != other._client or self._path != other._path - else: - return NotImplemented - - @property - def path(self): - """Database-relative for this document. - - Returns: - str: The document's relative path. - """ - return "/".join(self._path) - - @property - def _document_path(self): - """Create and cache the full path for this document. - - Of the form: - - ``projects/{project_id}/databases/{database_id}/... - documents/{document_path}`` - - Returns: - str: The full document path. - - Raises: - ValueError: If the current document reference has no ``client``. - """ - if self._document_path_internal is None: - if self._client is None: - raise ValueError("A document reference requires a `client`.") - self._document_path_internal = _get_document_path(self._client, self._path) - - return self._document_path_internal - - @property - def id(self): - """The document identifier (within its collection). - - Returns: - str: The last component of the path. - """ - return self._path[-1] - - @property - def parent(self): - """Collection that owns the current document. - - Returns: - ~.firestore_v1beta1.collection.CollectionReference: The - parent collection. - """ - parent_path = self._path[:-1] - return self._client.collection(*parent_path) - - def collection(self, collection_id): - """Create a sub-collection underneath the current document. - - Args: - collection_id (str): The sub-collection identifier (sometimes - referred to as the "kind"). - - Returns: - ~.firestore_v1beta1.collection.CollectionReference: The - child collection. - """ - child_path = self._path + (collection_id,) - return self._client.collection(*child_path) - - def create(self, document_data): - """Create the current document in the Firestore database. - - Args: - document_data (dict): Property names and values to use for - creating a document. 
- - Returns: - google.cloud.firestore_v1beta1.types.WriteResult: The - write result corresponding to the committed document. A write - result contains an ``update_time`` field. - - Raises: - ~google.cloud.exceptions.Conflict: If the document already exists. - """ - batch = self._client.batch() - batch.create(self, document_data) - write_results = batch.commit() - return _first_write_result(write_results) - - def set(self, document_data, merge=False): - """Replace the current document in the Firestore database. - - A write ``option`` can be specified to indicate preconditions of - the "set" operation. If no ``option`` is specified and this document - doesn't exist yet, this method will create it. - - Overwrites all content for the document with the fields in - ``document_data``. This method performs almost the same functionality - as :meth:`create`. The only difference is that this method doesn't - make any requirements on the existence of the document (unless - ``option`` is used), whereas as :meth:`create` will fail if the - document already exists. - - Args: - document_data (dict): Property names and values to use for - replacing a document. - merge (Optional[bool] or Optional[List]): - If True, apply merging instead of overwriting the state - of the document. - - Returns: - google.cloud.firestore_v1beta1.types.WriteResult: The - write result corresponding to the committed document. A write - result contains an ``update_time`` field. - """ - batch = self._client.batch() - batch.set(self, document_data, merge=merge) - write_results = batch.commit() - return _first_write_result(write_results) - - def update(self, field_updates, option=None): - """Update an existing document in the Firestore database. - - By default, this method verifies that the document exists on the - server before making updates. A write ``option`` can be specified to - override these preconditions. - - Each key in ``field_updates`` can either be a field name or a - **field path** (For more information on **field paths**, see - :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path`.) To - illustrate this, consider a document with - - .. code-block:: python - - >>> snapshot = document.get() - >>> snapshot.to_dict() - { - 'foo': { - 'bar': 'baz', - }, - 'other': True, - } - - stored on the server. If the field name is used in the update: - - .. code-block:: python - - >>> field_updates = { - ... 'foo': { - ... 'quux': 800, - ... }, - ... } - >>> document.update(field_updates) - - then all of ``foo`` will be overwritten on the server and the new - value will be - - .. code-block:: python - - >>> snapshot = document.get() - >>> snapshot.to_dict() - { - 'foo': { - 'quux': 800, - }, - 'other': True, - } - - On the other hand, if a ``.``-delimited **field path** is used in the - update: - - .. code-block:: python - - >>> field_updates = { - ... 'foo.quux': 800, - ... } - >>> document.update(field_updates) - - then only ``foo.quux`` will be updated on the server and the - field ``foo.bar`` will remain intact: - - .. code-block:: python - - >>> snapshot = document.get() - >>> snapshot.to_dict() - { - 'foo': { - 'bar': 'baz', - 'quux': 800, - }, - 'other': True, - } - - .. warning:: - - A **field path** can only be used as a top-level key in - ``field_updates``. - - To delete / remove a field from an existing document, use the - :attr:`~google.cloud.firestore_v1beta1.transforms.DELETE_FIELD` - sentinel. So with the example above, sending - - .. code-block:: python - - >>> field_updates = { - ... 
'other': firestore.DELETE_FIELD, - ... } - >>> document.update(field_updates) - - would update the value on the server to: - - .. code-block:: python - - >>> snapshot = document.get() - >>> snapshot.to_dict() - { - 'foo': { - 'bar': 'baz', - }, - } - - To set a field to the current time on the server when the - update is received, use the - :attr:`~google.cloud.firestore_v1beta1.transforms.SERVER_TIMESTAMP` - sentinel. Sending - - .. code-block:: python - - >>> field_updates = { - ... 'foo.now': firestore.SERVER_TIMESTAMP, - ... } - >>> document.update(field_updates) - - would update the value on the server to: - - .. code-block:: python - - >>> snapshot = document.get() - >>> snapshot.to_dict() - { - 'foo': { - 'bar': 'baz', - 'now': datetime.datetime(2012, ...), - }, - 'other': True, - } - - Args: - field_updates (dict): Field names or paths to update and values - to update with. - option (Optional[~.firestore_v1beta1.client.WriteOption]): A - write option to make assertions / preconditions on the server - state of the document before applying changes. - - Returns: - google.cloud.firestore_v1beta1.types.WriteResult: The - write result corresponding to the updated document. A write - result contains an ``update_time`` field. - - Raises: - ~google.cloud.exceptions.NotFound: If the document does not exist. - """ - batch = self._client.batch() - batch.update(self, field_updates, option=option) - write_results = batch.commit() - return _first_write_result(write_results) - - def delete(self, option=None): - """Delete the current document in the Firestore database. - - Args: - option (Optional[~.firestore_v1beta1.client.WriteOption]): A - write option to make assertions / preconditions on the server - state of the document before applying changes. - - Returns: - google.protobuf.timestamp_pb2.Timestamp: The time that the delete - request was received by the server. If the document did not exist - when the delete was sent (i.e. nothing was deleted), this method - will still succeed and will still return the time that the - request was received by the server. - """ - write_pb = _helpers.pb_for_delete(self._document_path, option) - commit_response = self._client._firestore_api.commit( - request={ - "database": self._client._database_string, - "writes": [write_pb], - "transaction": None, - }, - metadata=self._client._rpc_metadata, - ) - - return commit_response.commit_time - - def get(self, field_paths=None, transaction=None): - """Retrieve a snapshot of the current document. - - See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` - for more information on **field paths**. - - If a ``transaction`` is used and it already has write operations - added, this method cannot be used (i.e. read-after-write is not - allowed). - - Args: - field_paths (Optional[Iterable[str, ...]]): An iterable of field - paths (``.``-delimited list of field names) to use as a - projection of document fields in the returned results. If - no value is provided, all fields will be returned. - transaction (Optional[~.firestore_v1beta1.transaction.\ - Transaction]): An existing transaction that this reference - will be retrieved in. - - Returns: - ~.firestore_v1beta1.document.DocumentSnapshot: A snapshot of - the current document. If the document does not exist at - the time of `snapshot`, the snapshot `reference`, `data`, - `update_time`, and `create_time` attributes will all be - `None` and `exists` will be `False`. 
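For illustration, a minimal sketch of the projection described above, assuming a configured v1beta1 ``Client`` and an existing (hypothetical) ``users/alovelace`` document:

.. code-block:: python

    from google.cloud import firestore_v1beta1

    db = firestore_v1beta1.Client()
    doc_ref = db.collection(u'users').document(u'alovelace')

    # Only the requested field paths are populated in the snapshot.
    snapshot = doc_ref.get(field_paths=[u'name', u'stats.views'])
    if snapshot.exists:
        print(snapshot.to_dict())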
- """ - if isinstance(field_paths, six.string_types): - raise ValueError("'field_paths' must be a sequence of paths, not a string.") - - if field_paths is not None: - mask = common.DocumentMask(field_paths=sorted(field_paths)) - else: - mask = None - - firestore_api = self._client._firestore_api - try: - document_pb = firestore_api.get_document( - request={ - "name": self._document_path, - "mask": mask, - "transaction": _helpers.get_transaction_id(transaction), - }, - metadata=self._client._rpc_metadata, - ) - except exceptions.NotFound: - data = None - exists = False - create_time = None - update_time = None - else: - data = _helpers.decode_dict(document_pb.fields, self._client) - exists = True - create_time = document_pb.create_time - update_time = document_pb.update_time - - return DocumentSnapshot( - reference=self, - data=data, - exists=exists, - read_time=None, # No server read_time available - create_time=create_time, - update_time=update_time, - ) - - def collections(self, page_size=None): - """List subcollections of the current document. - - Args: - page_size (Optional[int]]): The maximum number of collections - in each page of results from this request. Non-positive values - are ignored. Defaults to a sensible value set by the API. - - Returns: - Sequence[~.firestore_v1beta1.collection.CollectionReference]: - iterator of subcollections of the current document. If the - document does not exist at the time of `snapshot`, the - iterator will be empty - """ - iterator = self._client._firestore_api.list_collection_ids( - request={"parent": self._document_path, "page_size": page_size}, - metadata=self._client._rpc_metadata, - ) - iterator.document = self - iterator.item_to_value = _item_to_collection_ref - return iterator - - def on_snapshot(self, callback): - """Watch this document. - - This starts a watch on this document using a background thread. The - provided callback is run on the snapshot. - - Args: - callback(~.firestore.document.DocumentSnapshot):a callback to run - when a change occurs - - Example: - from google.cloud import firestore_v1beta1 - - db = firestore_v1beta1.Client() - collection_ref = db.collection(u'users') - - def on_snapshot(document_snapshot): - doc = document_snapshot - print(u'{} => {}'.format(doc.id, doc.to_dict())) - - doc_ref = db.collection(u'users').document( - u'alovelace' + unique_resource_id()) - - # Watch this document - doc_watch = doc_ref.on_snapshot(on_snapshot) - - # Terminate this watch - doc_watch.unsubscribe() - """ - return Watch.for_document(self, callback, DocumentSnapshot, DocumentReference) - - -class DocumentSnapshot(object): - """A snapshot of document data in a Firestore database. - - This represents data retrieved at a specific time and may not contain - all fields stored for the document (i.e. a hand-picked selection of - fields may have been retrieved). - - Instances of this class are not intended to be constructed by hand, - rather they'll be returned as responses to various methods, such as - :meth:`~google.cloud.DocumentReference.get`. - - Args: - reference (~.firestore_v1beta1.document.DocumentReference): A - document reference corresponding to the document that contains - the data in this snapshot. - data (Dict[str, Any]): The data retrieved in the snapshot. - exists (bool): Indicates if the document existed at the time the - snapshot was retrieved. - read_time (google.protobuf.timestamp_pb2.Timestamp): The time that - this snapshot was read from the server. 
- create_time (google.protobuf.timestamp_pb2.Timestamp): The time that - this document was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): The time that - this document was last updated. - """ - - def __init__(self, reference, data, exists, read_time, create_time, update_time): - self._reference = reference - # We want immutable data, so callers can't modify this value - # out from under us. - self._data = copy.deepcopy(data) - self._exists = exists - self.read_time = read_time - """google.protobuf.timestamp_pb2.Timestamp: Time snapshot was read.""" - self.create_time = create_time - """google.protobuf.timestamp_pb2.Timestamp: Document's creation.""" - self.update_time = update_time - """google.protobuf.timestamp_pb2.Timestamp: Document's last update.""" - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return NotImplemented - return self._reference == other._reference and self._data == other._data - - def __hash__(self): - # TODO(microgen, https://github.com/googleapis/proto-plus-python/issues/38): - # maybe add datetime_with_nanos to protoplus, revisit - # seconds = self.update_time.seconds - # nanos = self.update_time.nanos - seconds = int(self.update_time.timestamp()) - nanos = 0 - return hash(self._reference) + hash(seconds) + hash(nanos) - - @property - def _client(self): - """The client that owns the document reference for this snapshot. - - Returns: - ~.firestore_v1beta1.client.Client: The client that owns this - document. - """ - return self._reference._client - - @property - def exists(self): - """Existence flag. - - Indicates if the document existed at the time this snapshot - was retrieved. - - Returns: - bool: The existence flag. - """ - return self._exists - - @property - def id(self): - """The document identifier (within its collection). - - Returns: - str: The last component of the path of the document. - """ - return self._reference.id - - @property - def reference(self): - """Document reference corresponding to document that owns this data. - - Returns: - ~.firestore_v1beta1.document.DocumentReference: A document - reference corresponding to this document. - """ - return self._reference - - def get(self, field_path): - """Get a value from the snapshot data. - - If the data is nested, for example: - - .. code-block:: python - - >>> snapshot.to_dict() - { - 'top1': { - 'middle2': { - 'bottom3': 20, - 'bottom4': 22, - }, - 'middle5': True, - }, - 'top6': b'\x00\x01 foo', - } - - a **field path** can be used to access the nested data. For - example: - - .. code-block:: python - - >>> snapshot.get('top1') - { - 'middle2': { - 'bottom3': 20, - 'bottom4': 22, - }, - 'middle5': True, - } - >>> snapshot.get('top1.middle2') - { - 'bottom3': 20, - 'bottom4': 22, - } - >>> snapshot.get('top1.middle2.bottom3') - 20 - - See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` - for more information on **field paths**. - - A copy is returned since the data may contain mutable values, - but the data stored in the snapshot must remain immutable. - - Args: - field_path (str): A field path (``.``-delimited list of - field names). - - Returns: - Any or None: - (A copy of) the value stored for the ``field_path`` or - None if snapshot document does not exist. - - Raises: - KeyError: If the ``field_path`` does not match nested data - in the snapshot. 
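As a small illustration of the copy semantics above (assuming ``snapshot`` holds the nested example data shown earlier), mutating the returned value never changes the snapshot itself:

.. code-block:: python

    data = snapshot.to_dict()
    data['top1']['middle2']['bottom3'] = 99   # modifies only the returned copy

    assert snapshot.get('top1.middle2.bottom3') == 20   # snapshot is unchanged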
- """ - if not self._exists: - return None - nested_data = field_path_module.get_nested_value(field_path, self._data) - return copy.deepcopy(nested_data) - - def to_dict(self): - """Retrieve the data contained in this snapshot. - - A copy is returned since the data may contain mutable values, - but the data stored in the snapshot must remain immutable. - - Returns: - Dict[str, Any] or None: - The data in the snapshot. Returns None if reference - does not exist. - """ - if not self._exists: - return None - return copy.deepcopy(self._data) - - -def _get_document_path(client, path): - """Convert a path tuple into a full path string. - - Of the form: - - ``projects/{project_id}/databases/{database_id}/... - documents/{document_path}`` - - Args: - client (~.firestore_v1beta1.client.Client): The client that holds - configuration details and a GAPIC client object. - path (Tuple[str, ...]): The components in a document path. - - Returns: - str: The fully-qualified document path. - """ - parts = (client._database_string, "documents") + path - return _helpers.DOCUMENT_PATH_DELIMITER.join(parts) - - -def _consume_single_get(response_iterator): - """Consume a gRPC stream that should contain a single response. - - The stream will correspond to a ``BatchGetDocuments`` request made - for a single document. - - Args: - response_iterator (~google.cloud.exceptions.GrpcRendezvous): A - streaming iterator returned from a ``BatchGetDocuments`` - request. - - Returns: - ~google.cloud.proto.firestore.v1beta1.\ - firestore.BatchGetDocumentsResponse: The single "get" - response in the batch. - - Raises: - ValueError: If anything other than exactly one response is returned. - """ - # Calling ``list()`` consumes the entire iterator. - all_responses = list(response_iterator) - if len(all_responses) != 1: - raise ValueError( - "Unexpected response from `BatchGetDocumentsResponse`", - all_responses, - "Expected only one result", - ) - - return all_responses[0] - - -def _first_write_result(write_results): - """Get first write result from list. - - For cases where ``len(write_results) > 1``, this assumes the writes - occurred at the same time (e.g. if an update and transform are sent - at the same time). - - Args: - write_results (List[google.cloud.proto.firestore.v1beta1.\ - write.WriteResult, ...]: The write results from a - ``CommitResponse``. - - Returns: - google.cloud.firestore_v1beta1.types.WriteResult: The - lone write result from ``write_results``. - - Raises: - ValueError: If there are zero write results. This is likely to - **never** occur, since the backend should be stable. - """ - if not write_results: - raise ValueError("Expected at least one write result") - - return write_results[0] - - -def _item_to_collection_ref(iterator, item): - """Convert collection ID to collection ref. - - Args: - iterator (google.api_core.page_iterator.GRPCIterator): - iterator response - item (str): ID of the collection - """ - return iterator.document.collection(item) diff --git a/google/cloud/firestore_v1beta1/field_path.py b/google/cloud/firestore_v1beta1/field_path.py deleted file mode 100644 index 1570aefb5..000000000 --- a/google/cloud/firestore_v1beta1/field_path.py +++ /dev/null @@ -1,386 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for managing / converting field paths to / from strings."""
-
-try:
- from collections import abc as collections_abc
-except ImportError: # Python 2.7
- import collections as collections_abc
-
-import re
-
-import six
-
-
-_FIELD_PATH_MISSING_TOP = "{!r} is not contained in the data"
-_FIELD_PATH_MISSING_KEY = "{!r} is not contained in the data for the key {!r}"
-_FIELD_PATH_WRONG_TYPE = (
- "The data at {!r} is not a dictionary, so it cannot contain the key {!r}"
-)
-
-_FIELD_PATH_DELIMITER = "."
-_BACKSLASH = "\\"
-_ESCAPED_BACKSLASH = _BACKSLASH * 2
-_BACKTICK = "`"
-_ESCAPED_BACKTICK = _BACKSLASH + _BACKTICK
-
-_SIMPLE_FIELD_NAME = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*$")
-_LEADING_ALPHA_INVALID = re.compile("^[_a-zA-Z][_a-zA-Z0-9]*[^_a-zA-Z0-9]")
-PATH_ELEMENT_TOKENS = [
- ("SIMPLE", r"[_a-zA-Z][_a-zA-Z0-9]*"), # unquoted elements
- ("QUOTED", r"`(?:\\`|[^`])*?`"), # quoted elements, unquoted
- ("DOT", r"\."), # separator
-]
-TOKENS_PATTERN = "|".join("(?P<{}>{})".format(*pair) for pair in PATH_ELEMENT_TOKENS)
-TOKENS_REGEX = re.compile(TOKENS_PATTERN)
-
-
-def _tokenize_field_path(path):
- """Lex a field path into tokens (including dots).
-
- Args:
- path (str): field path to be lexed.
- Returns:
- List(str): tokens
- """
- pos = 0
- get_token = TOKENS_REGEX.match
- match = get_token(path)
- while match is not None:
- type_ = match.lastgroup
- value = match.group(type_)
- yield value
- pos = match.end()
- match = get_token(path, pos)
- if pos != len(path):
- raise ValueError("Path {} not consumed, residue: {}".format(path, path[pos:]))
-
-
-def split_field_path(path):
- """Split a field path into valid elements (without dots).
-
- Args:
- path (str): field path to be lexed.
- Returns:
- List(str): tokens
- Raises:
- ValueError: if the path does not match the elements-interspersed-
- with-dots pattern.
- """
- if not path:
- return []
-
- elements = []
- want_dot = False
-
- for element in _tokenize_field_path(path):
- if want_dot:
- if element != ".":
- raise ValueError("Invalid path: {}".format(path))
- else:
- want_dot = False
- else:
- if element == ".":
- raise ValueError("Invalid path: {}".format(path))
- elements.append(element)
- want_dot = True
-
- if not want_dot or not elements:
- raise ValueError("Invalid path: {}".format(path))
-
- return elements
-
-
-def parse_field_path(api_repr):
- """Parse a **field path** into a list of nested field names.
-
- See :func:`field_path` for more on **field paths**.
-
- Args:
- api_repr (str):
- The unique Firestore api representation which consists of
- either simple or UTF-8 field names. It cannot exceed
- 1500 bytes, and cannot be empty. Simple field names match
- ``'^[_a-zA-Z][_a-zA-Z0-9]*$'``. All other field names are
- escaped by surrounding them with backticks.
-
- Returns:
- List[str, ...]: The list of field names in the field path.
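For example, a sketch of the escaping rules using the helpers in this module; a backtick-quoted element keeps its embedded dot and survives a parse/render round trip:

.. code-block:: python

    from google.cloud.firestore_v1beta1 import field_path

    parts = field_path.parse_field_path('foo.`bar.baz`')
    assert parts == ['foo', 'bar.baz']
    assert field_path.render_field_path(parts) == 'foo.`bar.baz`'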
- """ - # code dredged back up from - # https://github.com/googleapis/google-cloud-python/pull/5109/files - field_names = [] - for field_name in split_field_path(api_repr): - # non-simple field name - if field_name[0] == "`" and field_name[-1] == "`": - field_name = field_name[1:-1] - field_name = field_name.replace(_ESCAPED_BACKTICK, _BACKTICK) - field_name = field_name.replace(_ESCAPED_BACKSLASH, _BACKSLASH) - field_names.append(field_name) - return field_names - - -def render_field_path(field_names): - """Create a **field path** from a list of nested field names. - - A **field path** is a ``.``-delimited concatenation of the field - names. It is used to represent a nested field. For example, - in the data - - .. code-block: python - - data = { - 'aa': { - 'bb': { - 'cc': 10, - }, - }, - } - - the field path ``'aa.bb.cc'`` represents that data stored in - ``data['aa']['bb']['cc']``. - - Args: - field_names (Iterable[str, ...]): The list of field names. - - Returns: - str: The ``.``-delimited field path. - """ - result = [] - - for field_name in field_names: - match = _SIMPLE_FIELD_NAME.match(field_name) - if match and match.group(0) == field_name: - result.append(field_name) - else: - replaced = field_name.replace(_BACKSLASH, _ESCAPED_BACKSLASH).replace( - _BACKTICK, _ESCAPED_BACKTICK - ) - result.append(_BACKTICK + replaced + _BACKTICK) - - return _FIELD_PATH_DELIMITER.join(result) - - -get_field_path = render_field_path # backward-compatibility - - -def get_nested_value(field_path, data): - """Get a (potentially nested) value from a dictionary. - - If the data is nested, for example: - - .. code-block:: python - - >>> data - { - 'top1': { - 'middle2': { - 'bottom3': 20, - 'bottom4': 22, - }, - 'middle5': True, - }, - 'top6': b'\x00\x01 foo', - } - - a **field path** can be used to access the nested data. For - example: - - .. code-block:: python - - >>> get_nested_value('top1', data) - { - 'middle2': { - 'bottom3': 20, - 'bottom4': 22, - }, - 'middle5': True, - } - >>> get_nested_value('top1.middle2', data) - { - 'bottom3': 20, - 'bottom4': 22, - } - >>> get_nested_value('top1.middle2.bottom3', data) - 20 - - See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` for - more information on **field paths**. - - Args: - field_path (str): A field path (``.``-delimited list of - field names). - data (Dict[str, Any]): The (possibly nested) data. - - Returns: - Any: (A copy of) the value stored for the ``field_path``. - - Raises: - KeyError: If the ``field_path`` does not match nested data. - """ - field_names = parse_field_path(field_path) - - nested_data = data - for index, field_name in enumerate(field_names): - if isinstance(nested_data, collections_abc.Mapping): - if field_name in nested_data: - nested_data = nested_data[field_name] - else: - if index == 0: - msg = _FIELD_PATH_MISSING_TOP.format(field_name) - raise KeyError(msg) - else: - partial = render_field_path(field_names[:index]) - msg = _FIELD_PATH_MISSING_KEY.format(field_name, partial) - raise KeyError(msg) - else: - partial = render_field_path(field_names[:index]) - msg = _FIELD_PATH_WRONG_TYPE.format(partial, field_name) - raise KeyError(msg) - - return nested_data - - -class FieldPath(object): - """Field Path object for client use. - - A field path is a sequence of element keys, separated by periods. - Each element key can be either a simple identifier, or a full unicode - string. 
- - In the string representation of a field path, non-identifier elements - must be quoted using backticks, with internal backticks and backslashes - escaped with a backslash. - - Args: - parts: (one or more strings) - Indicating path of the key to be used. - """ - - def __init__(self, *parts): - for part in parts: - if not isinstance(part, six.string_types) or not part: - error = "One or more components is not a string or is empty." - raise ValueError(error) - self.parts = tuple(parts) - - @classmethod - def from_api_repr(cls, api_repr): - """Factory: create a FieldPath from the string formatted per the API. - - Args: - api_repr (str): a string path, with non-identifier elements quoted - It cannot exceed 1500 characters, and cannot be empty. - Returns: - (:class:`FieldPath`) An instance parsed from ``api_repr``. - Raises: - ValueError if the parsing fails - """ - api_repr = api_repr.strip() - if not api_repr: - raise ValueError("Field path API representation cannot be empty.") - return cls(*parse_field_path(api_repr)) - - @classmethod - def from_string(cls, path_string): - """Factory: create a FieldPath from a unicode string representation. - - This method splits on the character `.` and disallows the - characters `~*/[]`. To create a FieldPath whose components have - those characters, call the constructor. - - Args: - path_string (str): A unicode string which cannot contain - `~*/[]` characters, cannot exceed 1500 bytes, and cannot be empty. - - Returns: - (:class:`FieldPath`) An instance parsed from ``path_string``. - """ - try: - return cls.from_api_repr(path_string) - except ValueError: - elements = path_string.split(".") - for element in elements: - if not element: - raise ValueError("Empty element") - if _LEADING_ALPHA_INVALID.match(element): - raise ValueError( - "Non-alphanum char in element with leading alpha: {}".format( - element - ) - ) - return FieldPath(*elements) - - def __repr__(self): - paths = "" - for part in self.parts: - paths += "'" + part + "'," - paths = paths[:-1] - return "FieldPath({})".format(paths) - - def __hash__(self): - return hash(self.to_api_repr()) - - def __eq__(self, other): - if isinstance(other, FieldPath): - return self.parts == other.parts - return NotImplemented - - def __lt__(self, other): - if isinstance(other, FieldPath): - return self.parts < other.parts - return NotImplemented - - def __add__(self, other): - """Adds `other` field path to end of this field path. - - Args: - other (~google.cloud.firestore_v1beta1._helpers.FieldPath, str): - The field path to add to the end of this `FieldPath`. - """ - if isinstance(other, FieldPath): - parts = self.parts + other.parts - return FieldPath(*parts) - elif isinstance(other, six.string_types): - parts = self.parts + FieldPath.from_string(other).parts - return FieldPath(*parts) - else: - return NotImplemented - - def to_api_repr(self): - """Render a quoted string representation of the FieldPath - - Returns: - (str) Quoted string representation of the path stored - within this FieldPath. - """ - return render_field_path(self.parts) - - def eq_or_parent(self, other): - """Check whether ``other`` is an ancestor. - - Returns: - (bool) True IFF ``other`` is an ancestor or equal to ``self``, - else False. - """ - return self.parts[: len(other.parts)] == other.parts[: len(self.parts)] - - def lineage(self): - """Return field paths for all parents. 
-
- Returns: Set[:class:`FieldPath`]
- """
- indexes = six.moves.range(1, len(self.parts))
- return {FieldPath(*self.parts[:index]) for index in indexes}
diff --git a/google/cloud/firestore_v1beta1/order.py b/google/cloud/firestore_v1beta1/order.py
deleted file mode 100644
index f375fa1b7..000000000
--- a/google/cloud/firestore_v1beta1/order.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# Copyright 2017 Google LLC All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import Enum
-from google.cloud.firestore_v1beta1._helpers import decode_value
-import math
-
-
-class TypeOrder(Enum):
- # NOTE: This order is defined by the backend and cannot be changed.
- NULL = 0
- BOOLEAN = 1
- NUMBER = 2
- TIMESTAMP = 3
- STRING = 4
- BLOB = 5
- REF = 6
- GEO_POINT = 7
- ARRAY = 8
- OBJECT = 9
-
- @staticmethod
- def from_value(value):
- v = value._pb.WhichOneof("value_type")
-
- lut = {
- "null_value": TypeOrder.NULL,
- "boolean_value": TypeOrder.BOOLEAN,
- "integer_value": TypeOrder.NUMBER,
- "double_value": TypeOrder.NUMBER,
- "timestamp_value": TypeOrder.TIMESTAMP,
- "string_value": TypeOrder.STRING,
- "bytes_value": TypeOrder.BLOB,
- "reference_value": TypeOrder.REF,
- "geo_point_value": TypeOrder.GEO_POINT,
- "array_value": TypeOrder.ARRAY,
- "map_value": TypeOrder.OBJECT,
- }
-
- if v not in lut:
- raise ValueError("Could not detect value type for " + str(v))
- return lut[v]
-
-
-class Order(object):
- """
- Order implements the ordering semantics of the backend.
- """
-
- @classmethod
- def compare(cls, left, right):
- """
- Main comparison function for all Firestore types.
- @return -1 if left < right, 0 if left == right, otherwise 1
- """
- # First compare the types.
- leftType = TypeOrder.from_value(left).value - rightType = TypeOrder.from_value(right).value - - if leftType != rightType: - if leftType < rightType: - return -1 - return 1 - - value_type = left._pb.WhichOneof("value_type") - - if value_type == "null_value": - return 0 # nulls are all equal - elif value_type == "boolean_value": - return cls._compare_to(left.boolean_value, right.boolean_value) - elif value_type == "integer_value": - return cls.compare_numbers(left, right) - elif value_type == "double_value": - return cls.compare_numbers(left, right) - elif value_type == "timestamp_value": - return cls.compare_timestamps(left, right) - elif value_type == "string_value": - return cls._compare_to(left.string_value, right.string_value) - elif value_type == "bytes_value": - return cls.compare_blobs(left, right) - elif value_type == "reference_value": - return cls.compare_resource_paths(left, right) - elif value_type == "geo_point_value": - return cls.compare_geo_points(left, right) - elif value_type == "array_value": - return cls.compare_arrays(left, right) - elif value_type == "map_value": - return cls.compare_objects(left, right) - else: - raise ValueError("Unknown ``value_type``", str(value_type)) - - @staticmethod - def compare_blobs(left, right): - left_bytes = left.bytes_value - right_bytes = right.bytes_value - - return Order._compare_to(left_bytes, right_bytes) - - @staticmethod - def compare_timestamps(left, right): - left = left._pb.timestamp_value - right = right._pb.timestamp_value - - seconds = Order._compare_to(left.seconds or 0, right.seconds or 0) - if seconds != 0: - return seconds - - return Order._compare_to(left.nanos or 0, right.nanos or 0) - - @staticmethod - def compare_geo_points(left, right): - left_value = decode_value(left, None) - right_value = decode_value(right, None) - cmp = (left_value.latitude > right_value.latitude) - ( - left_value.latitude < right_value.latitude - ) - - if cmp != 0: - return cmp - return (left_value.longitude > right_value.longitude) - ( - left_value.longitude < right_value.longitude - ) - - @staticmethod - def compare_resource_paths(left, right): - left = left.reference_value - right = right.reference_value - - left_segments = left.split("/") - right_segments = right.split("/") - shorter = min(len(left_segments), len(right_segments)) - # compare segments - for i in range(shorter): - if left_segments[i] < right_segments[i]: - return -1 - if left_segments[i] > right_segments[i]: - return 1 - - left_length = len(left) - right_length = len(right) - return (left_length > right_length) - (left_length < right_length) - - @staticmethod - def compare_arrays(left, right): - l_values = left.array_value.values - r_values = right.array_value.values - - length = min(len(l_values), len(r_values)) - for i in range(length): - cmp = Order.compare(l_values[i], r_values[i]) - if cmp != 0: - return cmp - - return Order._compare_to(len(l_values), len(r_values)) - - @staticmethod - def compare_objects(left, right): - left_fields = left.map_value.fields - right_fields = right.map_value.fields - - for left_key, right_key in zip(sorted(left_fields), sorted(right_fields)): - keyCompare = Order._compare_to(left_key, right_key) - if keyCompare != 0: - return keyCompare - - value_compare = Order.compare( - left_fields[left_key], right_fields[right_key] - ) - if value_compare != 0: - return value_compare - - return Order._compare_to(len(left_fields), len(right_fields)) - - @staticmethod - def compare_numbers(left, right): - left_value = decode_value(left, None) - 
right_value = decode_value(right, None) - return Order.compare_doubles(left_value, right_value) - - @staticmethod - def compare_doubles(left, right): - if math.isnan(left): - if math.isnan(right): - return 0 - return -1 - if math.isnan(right): - return 1 - - return Order._compare_to(left, right) - - @staticmethod - def _compare_to(left, right): - # We can't just use cmp(left, right) because cmp doesn't exist - # in Python 3, so this is an equivalent suggested by - # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons - return (left > right) - (left < right) diff --git a/google/cloud/firestore_v1beta1/py.typed b/google/cloud/firestore_v1beta1/py.typed deleted file mode 100644 index cebdc43f1..000000000 --- a/google/cloud/firestore_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-firestore package uses inline types. diff --git a/google/cloud/firestore_v1beta1/query.py b/google/cloud/firestore_v1beta1/query.py deleted file mode 100644 index 54586f341..000000000 --- a/google/cloud/firestore_v1beta1/query.py +++ /dev/null @@ -1,969 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Classes for representing queries for the Google Cloud Firestore API. - -A :class:`~google.cloud.firestore_v1beta1.query.Query` can be created directly -from a :class:`~google.cloud.firestore_v1beta1.collection.Collection`, -and that can be a more common way to create a query than direct usage of the -constructor. -""" -import copy -import math -import warnings - -from google.protobuf import wrappers_pb2 -import six - -from google.cloud.firestore_v1beta1 import _helpers -from google.cloud.firestore_v1beta1 import document -from google.cloud.firestore_v1beta1 import field_path as field_path_module -from google.cloud.firestore_v1beta1 import transforms -from google.cloud.firestore_v1beta1.types import StructuredQuery -from google.cloud.firestore_v1beta1.types import query -from google.cloud.firestore_v1beta1.order import Order -from google.cloud.firestore_v1beta1.watch import Watch - -_EQ_OP = "==" -_operator_enum = StructuredQuery.FieldFilter.Operator -_COMPARISON_OPERATORS = { - "<": _operator_enum.LESS_THAN, - "<=": _operator_enum.LESS_THAN_OR_EQUAL, - _EQ_OP: _operator_enum.EQUAL, - ">=": _operator_enum.GREATER_THAN_OR_EQUAL, - ">": _operator_enum.GREATER_THAN, - "array_contains": _operator_enum.ARRAY_CONTAINS, -} -_BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}." -_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values' -_INVALID_WHERE_TRANSFORM = "Transforms cannot be used as where values." -_BAD_DIR_STRING = "Invalid direction {!r}. Must be one of {!r} or {!r}." -_INVALID_CURSOR_TRANSFORM = "Transforms cannot be used as cursor values." -_MISSING_ORDER_BY = ( - 'The "order by" field path {!r} is not present in the cursor data {!r}. 
' - "All fields sent to ``order_by()`` must be present in the fields " - "if passed to one of ``start_at()`` / ``start_after()`` / " - "``end_before()`` / ``end_at()`` to define a cursor." -) -_NO_ORDERS_FOR_CURSOR = ( - "Attempting to create a cursor with no fields to order on. " - "When defining a cursor with one of ``start_at()`` / ``start_after()`` / " - "``end_before()`` / ``end_at()``, all fields in the cursor must " - "come from fields set in ``order_by()``." -) -_MISMATCH_CURSOR_W_ORDER_BY = "The cursor {!r} does not match the order fields {!r}." - - -class Query(object): - """Represents a query to the Firestore API. - - Instances of this class are considered immutable: all methods that - would modify an instance instead return a new instance. - - Args: - parent (~.firestore_v1beta1.collection.Collection): The collection - that this query applies to. - projection (Optional[google.cloud.proto.firestore.v1beta1.\ - query.StructuredQuery.Projection]): A projection of document - fields to limit the query results to. - field_filters (Optional[Tuple[google.cloud.proto.firestore.v1beta1.\ - query.StructuredQuery.FieldFilter, ...]]): The filters to be - applied in the query. - orders (Optional[Tuple[google.cloud.proto.firestore.v1beta1.\ - query.StructuredQuery.Order, ...]]): The "order by" entries - to use in the query. - limit (Optional[int]): The maximum number of documents the - query is allowed to return. - offset (Optional[int]): The number of results to skip. - start_at (Optional[Tuple[dict, bool]]): Two-tuple of - - * a mapping of fields. Any field that is present in this mapping - must also be present in ``orders`` - * an ``after`` flag - - The fields and the flag combine to form a cursor used as - a starting point in a query result set. If the ``after`` - flag is :data:`True`, the results will start just after any - documents which have fields matching the cursor, otherwise - any matching documents will be included in the result set. - When the query is formed, the document values - will be used in the order given by ``orders``. - end_at (Optional[Tuple[dict, bool]]): Two-tuple of - - * a mapping of fields. Any field that is present in this mapping - must also be present in ``orders`` - * a ``before`` flag - - The fields and the flag combine to form a cursor used as - an ending point in a query result set. If the ``before`` - flag is :data:`True`, the results will end just before any - documents which have fields matching the cursor, otherwise - any matching documents will be included in the result set. - When the query is formed, the document values - will be used in the order given by ``orders``. 
- """ - - ASCENDING = "ASCENDING" - """str: Sort query results in ascending order on a field.""" - DESCENDING = "DESCENDING" - """str: Sort query results in descending order on a field.""" - - def __init__( - self, - parent, - projection=None, - field_filters=(), - orders=(), - limit=None, - offset=None, - start_at=None, - end_at=None, - ): - self._parent = parent - self._projection = projection - self._field_filters = field_filters - self._orders = orders - self._limit = limit - self._offset = offset - self._start_at = start_at - self._end_at = end_at - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return NotImplemented - return ( - self._parent == other._parent - and self._projection == other._projection - and self._field_filters == other._field_filters - and self._orders == other._orders - and self._limit == other._limit - and self._offset == other._offset - and self._start_at == other._start_at - and self._end_at == other._end_at - ) - - @property - def _client(self): - """The client of the parent collection. - - Returns: - ~.firestore_v1beta1.client.Client: The client that owns - this query. - """ - return self._parent._client - - def select(self, field_paths): - """Project documents matching query to a limited set of fields. - - See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` - for more information on **field paths**. - - If the current query already has a projection set (i.e. has already - called :meth:`~google.cloud.firestore_v1beta1.query.Query.select`), - this will overwrite it. - - Args: - field_paths (Iterable[str, ...]): An iterable of field paths - (``.``-delimited list of field names) to use as a projection - of document fields in the query results. - - Returns: - ~.firestore_v1beta1.query.Query: A "projected" query. Acts as - a copy of the current query, modified with the newly added - projection. - Raises: - ValueError: If any ``field_path`` is invalid. - """ - field_paths = list(field_paths) - for field_path in field_paths: - field_path_module.split_field_path(field_path) # raises - - new_projection = query.StructuredQuery.Projection( - fields=[ - query.StructuredQuery.FieldReference(field_path=field_path) - for field_path in field_paths - ] - ) - return self.__class__( - self._parent, - projection=new_projection, - field_filters=self._field_filters, - orders=self._orders, - limit=self._limit, - offset=self._offset, - start_at=self._start_at, - end_at=self._end_at, - ) - - def where(self, field_path, op_string, value): - """Filter the query on a field. - - See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` - for more information on **field paths**. - - Returns a new :class:`~google.cloud.firestore_v1beta1.query.Query` - that filters on a specific field path, according to an operation - (e.g. ``==`` or "equals") and a particular value to be paired with - that operation. - - Args: - field_path (str): A field path (``.``-delimited list of - field names) for the field to filter on. - op_string (str): A comparison operation in the form of a string. - Acceptable values are ``<``, ``<=``, ``==``, ``>=`` - and ``>``. - value (Any): The value to compare the field against in the filter. - If ``value`` is :data:`None` or a NaN, then ``==`` is the only - allowed operation. - - Returns: - ~.firestore_v1beta1.query.Query: A filtered query. Acts as a - copy of the current query, modified with the newly added filter. - - Raises: - ValueError: If ``field_path`` is invalid. 
- ValueError: If ``value`` is a NaN or :data:`None` and - ``op_string`` is not ``==``. - """ - field_path_module.split_field_path(field_path) # raises - - if value is None: - if op_string != _EQ_OP: - raise ValueError(_BAD_OP_NAN_NULL) - filter_pb = query.StructuredQuery.UnaryFilter( - field=query.StructuredQuery.FieldReference(field_path=field_path), - op=StructuredQuery.UnaryFilter.Operator.IS_NULL, - ) - elif _isnan(value): - if op_string != _EQ_OP: - raise ValueError(_BAD_OP_NAN_NULL) - filter_pb = query.StructuredQuery.UnaryFilter( - field=query.StructuredQuery.FieldReference(field_path=field_path), - op=StructuredQuery.UnaryFilter.Operator.IS_NAN, - ) - elif isinstance(value, (transforms.Sentinel, transforms._ValueList)): - raise ValueError(_INVALID_WHERE_TRANSFORM) - else: - filter_pb = query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference(field_path=field_path), - op=_enum_from_op_string(op_string), - value=_helpers.encode_value(value), - ) - - new_filters = self._field_filters + (filter_pb,) - return self.__class__( - self._parent, - projection=self._projection, - field_filters=new_filters, - orders=self._orders, - limit=self._limit, - offset=self._offset, - start_at=self._start_at, - end_at=self._end_at, - ) - - @staticmethod - def _make_order(field_path, direction): - """Helper for :meth:`order_by`.""" - return query.StructuredQuery.Order( - field=query.StructuredQuery.FieldReference(field_path=field_path), - direction=_enum_from_direction(direction), - ) - - def order_by(self, field_path, direction=ASCENDING): - """Modify the query to add an order clause on a specific field. - - See :meth:`~google.cloud.firestore_v1beta1.client.Client.field_path` - for more information on **field paths**. - - Successive :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by` calls - will further refine the ordering of results returned by the query - (i.e. the new "order by" fields will be added to existing ones). - - Args: - field_path (str): A field path (``.``-delimited list of - field names) on which to order the query results. - direction (Optional[str]): The direction to order by. Must be one - of :attr:`ASCENDING` or :attr:`DESCENDING`, defaults to - :attr:`ASCENDING`. - - Returns: - ~.firestore_v1beta1.query.Query: An ordered query. Acts as a - copy of the current query, modified with the newly added - "order by" constraint. - - Raises: - ValueError: If ``field_path`` is invalid. - ValueError: If ``direction`` is not one of :attr:`ASCENDING` or - :attr:`DESCENDING`. - """ - field_path_module.split_field_path(field_path) # raises - - order_pb = self._make_order(field_path, direction) - - new_orders = self._orders + (order_pb,) - return self.__class__( - self._parent, - projection=self._projection, - field_filters=self._field_filters, - orders=new_orders, - limit=self._limit, - offset=self._offset, - start_at=self._start_at, - end_at=self._end_at, - ) - - def limit(self, count): - """Limit a query to return a fixed number of results. - - If the current query already has a limit set, this will overwrite it. - - Args: - count (int): Maximum number of documents to return that match - the query. - - Returns: - ~.firestore_v1beta1.query.Query: A limited query. Acts as a - copy of the current query, modified with the newly added - "limit" filter. 
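To make the ``None`` / NaN rule in ``where()`` above concrete (``users`` is the hypothetical collection from the earlier sketch): only equality is accepted for those values, and it is translated into a unary ``IS_NULL`` / ``IS_NAN`` filter rather than a field filter:

.. code-block:: python

    users.where(u'deleted_at', u'==', None)       # becomes an IS_NULL unary filter
    users.where(u'score', u'==', float('nan'))    # becomes an IS_NAN unary filter

    try:
        users.where(u'deleted_at', u'>', None)    # any other operator is rejected
    except ValueError as exc:
        print(exc)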
- """ - return self.__class__( - self._parent, - projection=self._projection, - field_filters=self._field_filters, - orders=self._orders, - limit=count, - offset=self._offset, - start_at=self._start_at, - end_at=self._end_at, - ) - - def offset(self, num_to_skip): - """Skip to an offset in a query. - - If the current query already has specified an offset, this will - overwrite it. - - Args: - num_to_skip (int): The number of results to skip at the beginning - of query results. (Must be non-negative.) - - Returns: - ~.firestore_v1beta1.query.Query: An offset query. Acts as a - copy of the current query, modified with the newly added - "offset" field. - """ - return self.__class__( - self._parent, - projection=self._projection, - field_filters=self._field_filters, - orders=self._orders, - limit=self._limit, - offset=num_to_skip, - start_at=self._start_at, - end_at=self._end_at, - ) - - def _cursor_helper(self, document_fields, before, start): - """Set values to be used for a ``start_at`` or ``end_at`` cursor. - - The values will later be used in a query protobuf. - - When the query is sent to the server, the ``document_fields`` will - be used in the order given by fields set by - :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - before (bool): Flag indicating if the document in - ``document_fields`` should (:data:`False`) or - shouldn't (:data:`True`) be included in the result set. - start (Optional[bool]): determines if the cursor is a ``start_at`` - cursor (:data:`True`) or an ``end_at`` cursor (:data:`False`). - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. Acts as - a copy of the current query, modified with the newly added - "start at" cursor. - """ - if isinstance(document_fields, tuple): - document_fields = list(document_fields) - elif isinstance(document_fields, document.DocumentSnapshot): - if document_fields.reference._path[:-1] != self._parent._path: - raise ValueError( - "Cannot use snapshot from another collection as a cursor." - ) - else: - # NOTE: We copy so that the caller can't modify after calling. - document_fields = copy.deepcopy(document_fields) - - cursor_pair = document_fields, before - query_kwargs = { - "projection": self._projection, - "field_filters": self._field_filters, - "orders": self._orders, - "limit": self._limit, - "offset": self._offset, - } - if start: - query_kwargs["start_at"] = cursor_pair - query_kwargs["end_at"] = self._end_at - else: - query_kwargs["start_at"] = self._start_at - query_kwargs["end_at"] = cursor_pair - - return self.__class__(self._parent, **query_kwargs) - - def start_at(self, document_fields): - """Start query results at a particular document value. - - The result set will **include** the document specified by - ``document_fields``. - - If the current query already has specified a start cursor -- either - via this method or - :meth:`~google.cloud.firestore_v1beta1.query.Query.start_after` -- this will - overwrite it. - - When the query is sent to the server, the ``document_fields`` will - be used in the order given by fields set by - :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`. 
- - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. Acts as - a copy of the current query, modified with the newly added - "start at" cursor. - """ - return self._cursor_helper(document_fields, before=True, start=True) - - def start_after(self, document_fields): - """Start query results after a particular document value. - - The result set will **exclude** the document specified by - ``document_fields``. - - If the current query already has specified a start cursor -- either - via this method or - :meth:`~google.cloud.firestore_v1beta1.query.Query.start_at` -- this will - overwrite it. - - When the query is sent to the server, the ``document_fields`` will - be used in the order given by fields set by - :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. Acts as - a copy of the current query, modified with the newly added - "start after" cursor. - """ - return self._cursor_helper(document_fields, before=False, start=True) - - def end_before(self, document_fields): - """End query results before a particular document value. - - The result set will **exclude** the document specified by - ``document_fields``. - - If the current query already has specified an end cursor -- either - via this method or - :meth:`~google.cloud.firestore_v1beta1.query.Query.end_at` -- this will - overwrite it. - - When the query is sent to the server, the ``document_fields`` will - be used in the order given by fields set by - :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. - - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. Acts as - a copy of the current query, modified with the newly added - "end before" cursor. - """ - return self._cursor_helper(document_fields, before=True, start=False) - - def end_at(self, document_fields): - """End query results at a particular document value. - - The result set will **include** the document specified by - ``document_fields``. - - If the current query already has specified an end cursor -- either - via this method or - :meth:`~google.cloud.firestore_v1beta1.query.Query.end_before` -- this will - overwrite it. - - When the query is sent to the server, the ``document_fields`` will - be used in the order given by fields set by - :meth:`~google.cloud.firestore_v1beta1.query.Query.order_by`. - - Args: - document_fields (Union[~.firestore_v1beta1.\ - document.DocumentSnapshot, dict, list, tuple]): a document - snapshot or a dictionary/list/tuple of fields representing a - query results cursor. A cursor is a collection of values that - represent a position in a query result set. 
- - Returns: - ~.firestore_v1beta1.query.Query: A query with cursor. Acts as - a copy of the current query, modified with the newly added - "end at" cursor. - """ - return self._cursor_helper(document_fields, before=False, start=False) - - def _filters_pb(self): - """Convert all the filters into a single generic Filter protobuf. - - This may be a lone field filter or unary filter, may be a composite - filter or may be :data:`None`. - - Returns: - google.cloud.firestore_v1beta1.types.\ - StructuredQuery.Filter: A "generic" filter representing the - current query's filters. - """ - num_filters = len(self._field_filters) - if num_filters == 0: - return None - elif num_filters == 1: - return _filter_pb(self._field_filters[0]) - else: - composite_filter = query.StructuredQuery.CompositeFilter( - op=StructuredQuery.CompositeFilter.Operator.AND, - filters=[_filter_pb(filter_) for filter_ in self._field_filters], - ) - return query.StructuredQuery.Filter(composite_filter=composite_filter) - - @staticmethod - def _normalize_projection(projection): - """Helper: convert field paths to message.""" - if projection is not None: - - fields = list(projection.fields) - - if not fields: - field_ref = query.StructuredQuery.FieldReference(field_path="__name__") - return query.StructuredQuery.Projection(fields=[field_ref]) - - return projection - - def _normalize_orders(self): - """Helper: adjust orders based on cursors, where clauses.""" - orders = list(self._orders) - _has_snapshot_cursor = False - - if self._start_at: - if isinstance(self._start_at[0], document.DocumentSnapshot): - _has_snapshot_cursor = True - - if self._end_at: - if isinstance(self._end_at[0], document.DocumentSnapshot): - _has_snapshot_cursor = True - - if _has_snapshot_cursor: - should_order = [ - _enum_from_op_string(key) - for key in _COMPARISON_OPERATORS - if key not in (_EQ_OP, "array_contains") - ] - order_keys = [order.field.field_path for order in orders] - for filter_ in self._field_filters: - field = filter_.field.field_path - if filter_.op in should_order and field not in order_keys: - orders.append(self._make_order(field, "ASCENDING")) - if not orders: - orders.append(self._make_order("__name__", "ASCENDING")) - else: - order_keys = [order.field.field_path for order in orders] - if "__name__" not in order_keys: - direction = orders[-1].direction # enum? 
- orders.append(self._make_order("__name__", direction)) - - return orders - - def _normalize_cursor(self, cursor, orders): - """Helper: convert cursor to a list of values based on orders.""" - if cursor is None: - return - - if not orders: - raise ValueError(_NO_ORDERS_FOR_CURSOR) - - document_fields, before = cursor - - order_keys = [order.field.field_path for order in orders] - - if isinstance(document_fields, document.DocumentSnapshot): - snapshot = document_fields - document_fields = snapshot.to_dict() - document_fields["__name__"] = snapshot.reference - - if isinstance(document_fields, dict): - # Transform to list using orders - values = [] - data = document_fields - for order_key in order_keys: - try: - values.append(field_path_module.get_nested_value(order_key, data)) - except KeyError: - msg = _MISSING_ORDER_BY.format(order_key, data) - raise ValueError(msg) - document_fields = values - - if len(document_fields) != len(orders): - msg = _MISMATCH_CURSOR_W_ORDER_BY.format(document_fields, order_keys) - raise ValueError(msg) - - _transform_bases = (transforms.Sentinel, transforms._ValueList) - - for index, key_field in enumerate(zip(order_keys, document_fields)): - key, field = key_field - - if isinstance(field, _transform_bases): - msg = _INVALID_CURSOR_TRANSFORM - raise ValueError(msg) - - if key == "__name__" and isinstance(field, six.string_types): - document_fields[index] = self._parent.document(field) - - return document_fields, before - - def _to_protobuf(self): - """Convert the current query into the equivalent protobuf. - - Returns: - google.cloud.firestore_v1beta1.types.StructuredQuery: The - query protobuf. - """ - projection = self._normalize_projection(self._projection) - orders = self._normalize_orders() - start_at = self._normalize_cursor(self._start_at, orders) - end_at = self._normalize_cursor(self._end_at, orders) - - query_kwargs = { - "select": projection, - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=self._parent.id) - ], - "where": self._filters_pb(), - "order_by": orders, - "start_at": _cursor_pb(start_at), - "end_at": _cursor_pb(end_at), - } - if self._offset is not None: - query_kwargs["offset"] = self._offset - if self._limit is not None: - query_kwargs["limit"] = wrappers_pb2.Int32Value(value=self._limit) - - return query.StructuredQuery(**query_kwargs) - - def get(self, transaction=None): - """Deprecated alias for :meth:`stream`.""" - warnings.warn( - "'Query.get' is deprecated: please use 'Query.stream' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.stream(transaction=transaction) - - def stream(self, transaction=None): - """Read the documents in the collection that match this query. - - This sends a ``RunQuery`` RPC and then returns an iterator which - consumes each document returned in the stream of ``RunQueryResponse`` - messages. - - .. note:: - - The underlying stream of responses will time out after - the ``max_rpc_timeout_millis`` value set in the GAPIC - client configuration for the ``RunQuery`` API. Snapshots - not consumed from the iterator before that point will be lost. - - If a ``transaction`` is used and it already has write operations - added, this method cannot be used (i.e. read-after-write is not - allowed). - - Args: - transaction (Optional[~.firestore_v1beta1.transaction.\ - Transaction]): An existing transaction that this query will - run in. - - Yields: - ~.firestore_v1beta1.document.DocumentSnapshot: The next - document that fulfills the query. 
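A short sketch of the cursor behaviour described for ``start_at()`` / ``end_at()`` above (again using the hypothetical ``users`` collection): cursor fields are matched, in order, against the ``order_by()`` fields, and both endpoints below are inclusive:

.. code-block:: python

    query = (
        users.order_by(u'birth_year')
        .start_at({u'birth_year': 1815})
        .end_at({u'birth_year': 1900})
    )

    for snapshot in query.stream():
        print(snapshot.id)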
- """ - parent_path, expected_prefix = self._parent._parent_info() - response_iterator = self._client._firestore_api.run_query( - request={ - "parent": parent_path, - "structured_query": self._to_protobuf(), - "transaction": _helpers.get_transaction_id(transaction), - }, - metadata=self._client._rpc_metadata, - ) - - for response in response_iterator: - snapshot = _query_response_to_snapshot( - response, self._parent, expected_prefix - ) - if snapshot is not None: - yield snapshot - - def on_snapshot(self, callback): - """Monitor the documents in this collection that match this query. - - This starts a watch on this query using a background thread. The - provided callback is run on the snapshot of the documents. - - Args: - callback(~.firestore.query.QuerySnapshot): a callback to run when - a change occurs. - - Example: - from google.cloud import firestore_v1beta1 - - db = firestore_v1beta1.Client() - query_ref = db.collection(u'users').where("user", "==", u'Ada') - - def on_snapshot(docs, changes, read_time): - for doc in docs: - print(u'{} => {}'.format(doc.id, doc.to_dict())) - - # Watch this query - query_watch = query_ref.on_snapshot(on_snapshot) - - # Terminate this watch - query_watch.unsubscribe() - """ - return Watch.for_query( - self, callback, document.DocumentSnapshot, document.DocumentReference - ) - - def _comparator(self, doc1, doc2): - _orders = self._orders - - # Add implicit sorting by name, using the last specified direction. - if len(_orders) == 0: - lastDirection = Query.ASCENDING - else: - if _orders[-1].direction == 1: - lastDirection = Query.ASCENDING - else: - lastDirection = Query.DESCENDING - - orderBys = list(_orders) - - order_pb = query.StructuredQuery.Order( - field=query.StructuredQuery.FieldReference(field_path="id"), - direction=_enum_from_direction(lastDirection), - ) - orderBys.append(order_pb) - - for orderBy in orderBys: - if orderBy.field.field_path == "id": - # If ordering by docuent id, compare resource paths. - comp = Order()._compare_to(doc1.reference._path, doc2.reference._path) - else: - if ( - orderBy.field.field_path not in doc1._data - or orderBy.field.field_path not in doc2._data - ): - raise ValueError( - "Can only compare fields that exist in the " - "DocumentSnapshot. Please include the fields you are " - "ordering on in your select() call." - ) - v1 = doc1._data[orderBy.field.field_path] - v2 = doc2._data[orderBy.field.field_path] - encoded_v1 = _helpers.encode_value(v1) - encoded_v2 = _helpers.encode_value(v2) - comp = Order().compare(encoded_v1, encoded_v2) - - if comp != 0: - # 1 == Ascending, -1 == Descending - return orderBy.direction * comp - - return 0 - - -def _enum_from_op_string(op_string): - """Convert a string representation of a binary operator to an enum. - - These enums come from the protobuf message definition - ``StructuredQuery.FieldFilter.Operator``. - - Args: - op_string (str): A comparison operation in the form of a string. - Acceptable values are ``<``, ``<=``, ``==``, ``>=`` - and ``>``. - - Returns: - int: The enum corresponding to ``op_string``. - - Raises: - ValueError: If ``op_string`` is not a valid operator. - """ - try: - return _COMPARISON_OPERATORS[op_string] - except KeyError: - choices = ", ".join(sorted(_COMPARISON_OPERATORS.keys())) - msg = _BAD_OP_STRING.format(op_string, choices) - raise ValueError(msg) - - -def _isnan(value): - """Check if a value is NaN. - - This differs from ``math.isnan`` in that **any** input type is - allowed. - - Args: - value (Any): A value to check for NaN-ness. 
- - Returns: - bool: Indicates if the value is the NaN float. - """ - if isinstance(value, float): - return math.isnan(value) - else: - return False - - -def _enum_from_direction(direction): - """Convert a string representation of a direction to an enum. - - Args: - direction (str): A direction to order by. Must be one of - :attr:`~google.cloud.firestore.Query.ASCENDING` or - :attr:`~google.cloud.firestore.Query.DESCENDING`. - - Returns: - int: The enum corresponding to ``direction``. - - Raises: - ValueError: If ``direction`` is not a valid direction. - """ - if isinstance(direction, int): - return direction - - if direction == Query.ASCENDING: - return StructuredQuery.Direction.ASCENDING - elif direction == Query.DESCENDING: - return StructuredQuery.Direction.DESCENDING - else: - msg = _BAD_DIR_STRING.format(direction, Query.ASCENDING, Query.DESCENDING) - raise ValueError(msg) - - -def _filter_pb(field_or_unary): - """Convert a specific protobuf filter to the generic filter type. - - Args: - field_or_unary (Union[google.cloud.proto.firestore.v1beta1.\ - query.StructuredQuery.FieldFilter, google.cloud.proto.\ - firestore.v1beta1.query.StructuredQuery.FieldFilter]): A - field or unary filter to convert to a generic filter. - - Returns: - google.cloud.firestore_v1beta1.types.\ - StructuredQuery.Filter: A "generic" filter. - - Raises: - ValueError: If ``field_or_unary`` is not a field or unary filter. - """ - if isinstance(field_or_unary, query.StructuredQuery.FieldFilter): - return query.StructuredQuery.Filter(field_filter=field_or_unary) - elif isinstance(field_or_unary, query.StructuredQuery.UnaryFilter): - return query.StructuredQuery.Filter(unary_filter=field_or_unary) - else: - raise ValueError("Unexpected filter type", type(field_or_unary), field_or_unary) - - -def _cursor_pb(cursor_pair): - """Convert a cursor pair to a protobuf. - - If ``cursor_pair`` is :data:`None`, just returns :data:`None`. - - Args: - cursor_pair (Optional[Tuple[list, bool]]): Two-tuple of - - * a list of field values. - * a ``before`` flag - - Returns: - Optional[google.cloud.firestore_v1beta1.types.Cursor]: A - protobuf cursor corresponding to the values. - """ - if cursor_pair is not None: - data, before = cursor_pair - value_pbs = [_helpers.encode_value(value) for value in data] - return query.Cursor(values=value_pbs, before=before) - - -def _query_response_to_snapshot(response_pb, collection, expected_prefix): - """Parse a query response protobuf to a document snapshot. - - Args: - response_pb (google.cloud.proto.firestore.v1beta1.\ - firestore.RunQueryResponse): A - collection (~.firestore_v1beta1.collection.CollectionReference): A - reference to the collection that initiated the query. - expected_prefix (str): The expected prefix for fully-qualified - document names returned in the query results. This can be computed - directly from ``collection`` via :meth:`_parent_info`. - - Returns: - Optional[~.firestore.document.DocumentSnapshot]: A - snapshot of the data returned in the query. If ``response_pb.document`` - is not set, the snapshot will be :data:`None`. 
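``_normalize_cursor`` and ``_cursor_pb`` above back the public cursor methods; a hedged sketch of the two cursor shapes they accept (a dict keyed by the order-by fields, or a ``DocumentSnapshot``), using an illustrative ``users`` collection and field names:

    from google.cloud import firestore_v1beta1

    db = firestore_v1beta1.Client()
    users = db.collection(u"users")

    # Dict cursor: values are matched positionally against the order_by fields.
    query = users.order_by(u"birth_year").start_at({u"birth_year": 1815})

    # Snapshot cursor: the snapshot's data (plus __name__) supplies the values.
    first = next(users.order_by(u"birth_year").limit(1).stream(), None)
    if first is not None:
        query = users.order_by(u"birth_year").start_at(first)

    for doc in query.stream():
        print(doc.id)
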
- """ - if not response_pb._pb.HasField("document"): - return None - - document_id = _helpers.get_doc_id(response_pb.document, expected_prefix) - reference = collection.document(document_id) - data = _helpers.decode_dict(response_pb.document.fields, collection._client) - snapshot = document.DocumentSnapshot( - reference, - data, - exists=True, - read_time=response_pb._pb.read_time, - create_time=response_pb._pb.document.create_time, - update_time=response_pb._pb.document.update_time, - ) - return snapshot diff --git a/google/cloud/firestore_v1beta1/services/__init__.py b/google/cloud/firestore_v1beta1/services/__init__.py deleted file mode 100644 index 42ffdf2bc..000000000 --- a/google/cloud/firestore_v1beta1/services/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/google/cloud/firestore_v1beta1/services/firestore/__init__.py b/google/cloud/firestore_v1beta1/services/firestore/__init__.py deleted file mode 100644 index 14099c867..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .client import FirestoreClient -from .async_client import FirestoreAsyncClient - -__all__ = ( - "FirestoreClient", - "FirestoreAsyncClient", -) diff --git a/google/cloud/firestore_v1beta1/services/firestore/async_client.py b/google/cloud/firestore_v1beta1/services/firestore/async_client.py deleted file mode 100644 index f3323c9be..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/async_client.py +++ /dev/null @@ -1,946 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from collections import OrderedDict -import functools -import re -from typing import Dict, AsyncIterable, AsyncIterator, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.firestore_v1beta1.services.firestore import pagers -from google.cloud.firestore_v1beta1.types import common -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.cloud.firestore_v1beta1.types import firestore -from google.cloud.firestore_v1beta1.types import write as gf_write -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - -from .transports.base import FirestoreTransport -from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport -from .client import FirestoreClient - - -class FirestoreAsyncClient: - """The Cloud Firestore service. - - This service exposes several types of comparable timestamps: - - - ``create_time`` - The time at which a document was created. - Changes only when a document is deleted, then re-created. - Increases in a strict monotonic fashion. - - ``update_time`` - The time at which a document was last updated. - Changes every time a document is modified. Does not change when a - write results in no modifications. Increases in a strict - monotonic fashion. - - ``read_time`` - The time at which a particular state was - observed. Used to denote a consistent snapshot of the database or - the time at which a Document was observed to not exist. - - ``commit_time`` - The time at which the writes in a transaction - were committed. Any read with an equal or greater ``read_time`` - is guaranteed to see the effects of the transaction. - """ - - _client: FirestoreClient - - DEFAULT_ENDPOINT = FirestoreClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = FirestoreClient.DEFAULT_MTLS_ENDPOINT - - from_service_account_file = FirestoreClient.from_service_account_file - from_service_account_json = from_service_account_file - - get_transport_class = functools.partial( - type(FirestoreClient).get_transport_class, type(FirestoreClient) - ) - - def __init__( - self, - *, - credentials: credentials.Credentials = None, - transport: Union[str, FirestoreTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - ) -> None: - """Instantiate the firestore client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.FirestoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - - self._client = FirestoreClient( - credentials=credentials, transport=transport, client_options=client_options, - ) - - async def get_document( - self, - request: firestore.GetDocumentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Gets a single document. - - Args: - request (:class:`~.firestore.GetDocumentRequest`): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.document.Document: - A Firestore document. - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - - request = firestore.GetDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_documents( - self, - request: firestore.ListDocumentsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDocumentsAsyncPager: - r"""Lists documents. - - Args: - request (:class:`~.firestore.ListDocumentsRequest`): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.pagers.ListDocumentsAsyncPager: - The response for - [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - - request = firestore.ListDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
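A hedged sketch of calling the removed async surface directly; the project and document path are illustrative, and credentials are assumed to come from the environment:

    import asyncio

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreAsyncClient
    from google.cloud.firestore_v1beta1.types import firestore

    async def fetch_document():
        client = FirestoreAsyncClient()
        request = firestore.GetDocumentRequest(
            name="projects/my-project/databases/(default)/documents/users/alovelace"
        )
        # get_document issues a unary GetDocument RPC and returns the Document proto.
        document = await client.get_document(request=request)
        print(document.name)

    asyncio.run(fetch_document())
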
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_documents, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDocumentsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_document( - self, - request: firestore.CreateDocumentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Creates a new document. - - Args: - request (:class:`~.firestore.CreateDocumentRequest`): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.document.Document: - A Firestore document. - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - - request = firestore.CreateDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def update_document( - self, - request: firestore.UpdateDocumentRequest = None, - *, - document: gf_document.Document = None, - update_mask: common.DocumentMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gf_document.Document: - r"""Updates or inserts a document. - - Args: - request (:class:`~.firestore.UpdateDocumentRequest`): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument]. - document (:class:`~.gf_document.Document`): - Required. The updated document. - Creates the document if it does not - already exist. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`~.common.DocumentMask`): - The fields to update. - None of the field paths in the mask may - contain a reserved name. - If the document exists on the server and - has fields not referenced in the mask, - they are left unchanged. - Fields referenced in the mask, but not - present in the input document, are - deleted from the document on the server. 
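``list_documents`` wraps its response in ``ListDocumentsAsyncPager``, so page tokens are handled behind ``__aiter__``; a minimal sketch with an illustrative parent path:

    import asyncio

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreAsyncClient
    from google.cloud.firestore_v1beta1.types import firestore

    async def list_users():
        client = FirestoreAsyncClient()
        request = firestore.ListDocumentsRequest(
            parent="projects/my-project/databases/(default)/documents",
            collection_id="users",
        )
        # The pager fetches additional pages transparently while iterating.
        async for document in await client.list_documents(request=request):
            print(document.name)

    asyncio.run(list_users())
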
- This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.gf_document.Document: - A Firestore document. - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([document, update_mask]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.UpdateDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if document is not None: - request.document = document - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("document.name", request.document.name),) - ), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_document( - self, - request: firestore.DeleteDocumentRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a document. - - Args: - request (:class:`~.firestore.DeleteDocumentRequest`): - The request object. The request for - [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument]. - name (:class:`str`): - Required. The resource name of the Document to delete. - In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([name]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.DeleteDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
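The ``update_mask`` semantics documented above (a field named in the mask but absent from the document is deleted on the server) can be exercised with the keyword form of ``update_document``; the paths and values below are illustrative:

    import asyncio

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreAsyncClient
    from google.cloud.firestore_v1beta1.types import common
    from google.cloud.firestore_v1beta1.types import document

    async def upsert_user():
        client = FirestoreAsyncClient()
        doc = document.Document(
            name="projects/my-project/databases/(default)/documents/users/alovelace",
            fields={"first": document.Value(string_value="Ada")},
        )
        # "last" is listed in the mask but missing from the document, so the
        # server deletes it; "first" is written with the new value.
        mask = common.DocumentMask(field_paths=["first", "last"])
        updated = await client.update_document(document=doc, update_mask=mask)
        print(updated.update_time)

    asyncio.run(upsert_user())
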
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - def batch_get_documents( - self, - request: firestore.BatchGetDocumentsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> AsyncIterable[firestore.BatchGetDocumentsResponse]: - r"""Gets multiple documents. - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Args: - request (:class:`~.firestore.BatchGetDocumentsRequest`): - The request object. The request for - [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[~.firestore.BatchGetDocumentsResponse]: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. - - """ - # Create or coerce a protobuf request object. - - request = firestore.BatchGetDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_get_documents, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def begin_transaction( - self, - request: firestore.BeginTransactionRequest = None, - *, - database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.BeginTransactionResponse: - r"""Starts a new transaction. - - Args: - request (:class:`~.firestore.BeginTransactionRequest`): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. 
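``batch_get_documents`` is server-streaming and, as noted above, does not preserve request order; a hedged sketch (paths illustrative) that relies only on the ``AsyncIterable`` return annotation shown here:

    import asyncio

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreAsyncClient
    from google.cloud.firestore_v1beta1.types import firestore

    async def batch_get():
        client = FirestoreAsyncClient()
        prefix = "projects/my-project/databases/(default)/documents"
        request = firestore.BatchGetDocumentsRequest(
            database="projects/my-project/databases/(default)",
            documents=[prefix + "/users/alovelace", prefix + "/users/aturing"],
        )
        # Each response carries either a found Document or a missing name.
        async for response in client.batch_get_documents(request=request):
            if response.found.name:
                print("found", response.found.name)
            else:
                print("missing", response.missing)

    asyncio.run(batch_get())
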
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([database]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.begin_transaction, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def commit( - self, - request: firestore.CommitRequest = None, - *, - database: str = None, - writes: Sequence[gf_write.Write] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.CommitResponse: - r"""Commits a transaction, while optionally updating - documents. - - Args: - request (:class:`~.firestore.CommitRequest`): - The request object. The request for - [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - writes (:class:`Sequence[~.gf_write.Write]`): - The writes to apply. - Always executed atomically and in order. - This corresponds to the ``writes`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([database, writes]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if database is not None: - request.database = database - if writes is not None: - request.writes = writes - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def rollback( - self, - request: firestore.RollbackRequest = None, - *, - database: str = None, - transaction: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Rolls back a transaction. - - Args: - request (:class:`~.firestore.RollbackRequest`): - The request object. The request for - [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (:class:`bytes`): - Required. The transaction to roll - back. - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([database, transaction]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if database is not None: - request.database = database - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - def run_query( - self, - request: firestore.RunQueryRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> AsyncIterable[firestore.RunQueryResponse]: - r"""Runs a query. - - Args: - request (:class:`~.firestore.RunQueryRequest`): - The request object. The request for - [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[~.firestore.RunQueryResponse]: - The response for - [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. - - """ - # Create or coerce a protobuf request object. 
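``begin_transaction``, ``commit`` and ``rollback`` above compose into the usual transaction lifecycle; a hedged sketch against an illustrative default database (note the mutual exclusion between the ``request`` object and the convenience keyword arguments):

    import asyncio

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreAsyncClient
    from google.cloud.firestore_v1beta1.types import firestore

    async def run_transaction():
        client = FirestoreAsyncClient()
        database = "projects/my-project/databases/(default)"

        begun = await client.begin_transaction(database=database)
        try:
            # ... build Write messages for the transaction here ...
            commit_request = firestore.CommitRequest(
                database=database, transaction=begun.transaction, writes=[]
            )
            await client.commit(request=commit_request)
        except Exception:
            await client.rollback(database=database, transaction=begun.transaction)
            raise

    asyncio.run(run_transaction())
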
- - request = firestore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.run_query, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def write( - self, - requests: AsyncIterator[firestore.WriteRequest] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> AsyncIterable[firestore.WriteResponse]: - r"""Streams batches of document updates and deletes, in - order. - - Args: - requests (AsyncIterator[`~.firestore.WriteRequest`]): - The request object AsyncIterator. The request for - [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. - The first request creates a stream, or resumes an - existing one from a token. - When creating a new stream, the server replies with a - response containing only an ID and a token, to use in - the next request. - - When resuming a stream, the server first streams any - responses later than the given token, then a response - containing only an up-to-date token, to use in the next - request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[~.firestore.WriteResponse]: - The response for - [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.write, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def listen( - self, - requests: AsyncIterator[firestore.ListenRequest] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> AsyncIterable[firestore.ListenResponse]: - r"""Listens to changes. - - Args: - requests (AsyncIterator[`~.firestore.ListenRequest`]): - The request object AsyncIterator. A request for - [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[~.firestore.ListenResponse]: - The response for - [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.listen, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_collection_ids( - self, - request: firestore.ListCollectionIdsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.ListCollectionIdsResponse: - r"""Lists all the collection IDs underneath a document. - - Args: - request (:class:`~.firestore.ListCollectionIdsRequest`): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. - parent (:class:`str`): - Required. The parent document. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.ListCollectionIdsResponse: - The response from - [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.ListCollectionIdsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_collection_ids, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
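``list_collection_ids`` enumerates the collections directly under a document; a minimal sketch with an illustrative parent:

    import asyncio

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreAsyncClient

    async def list_ids():
        client = FirestoreAsyncClient()
        response = await client.list_collection_ids(
            parent="projects/my-project/databases/(default)/documents/rooms/eros"
        )
        for collection_id in response.collection_ids:
            print(collection_id)

    asyncio.run(list_ids())
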
- return response - - -try: - _client_info = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version, - ) -except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() - - -__all__ = ("FirestoreAsyncClient",) diff --git a/google/cloud/firestore_v1beta1/services/firestore/client.py b/google/cloud/firestore_v1beta1/services/firestore/client.py deleted file mode 100644 index 058fe41f4..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/client.py +++ /dev/null @@ -1,1059 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from collections import OrderedDict -import os -import re -from typing import Callable, Dict, Iterable, Iterator, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.firestore_v1beta1.services.firestore import pagers -from google.cloud.firestore_v1beta1.types import common -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.cloud.firestore_v1beta1.types import firestore -from google.cloud.firestore_v1beta1.types import write as gf_write -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - -from .transports.base import FirestoreTransport -from .transports.grpc import FirestoreGrpcTransport -from .transports.grpc_asyncio import FirestoreGrpcAsyncIOTransport - - -class FirestoreClientMeta(type): - """Metaclass for the Firestore client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] - _transport_registry["grpc"] = FirestoreGrpcTransport - _transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport - - def get_transport_class(cls, label: str = None,) -> Type[FirestoreTransport]: - """Return an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
- return next(iter(cls._transport_registry.values())) - - -class FirestoreClient(metaclass=FirestoreClientMeta): - """The Cloud Firestore service. - - This service exposes several types of comparable timestamps: - - - ``create_time`` - The time at which a document was created. - Changes only when a document is deleted, then re-created. - Increases in a strict monotonic fashion. - - ``update_time`` - The time at which a document was last updated. - Changes every time a document is modified. Does not change when a - write results in no modifications. Increases in a strict - monotonic fashion. - - ``read_time`` - The time at which a particular state was - observed. Used to denote a consistent snapshot of the database or - the time at which a Document was observed to not exist. - - ``commit_time`` - The time at which the writes in a transaction - were committed. Any read with an equal or greater ``read_time`` - is guaranteed to see the effects of the transaction. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "firestore.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - {@api.name}: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - *, - credentials: credentials.Credentials = None, - transport: Union[str, FirestoreTransport] = None, - client_options: ClientOptions = None, - ) -> None: - """Instantiate the firestore client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.FirestoreTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client.
GOOGLE_API_USE_MTLS - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) - if client_options is None: - client_options = ClientOptions.ClientOptions() - - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") - if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT - ) - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, FirestoreTransport): - # transport is a FirestoreTransport instance. - if credentials or client_options.credentials_file: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, - scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, - ) - - def get_document( - self, - request: firestore.GetDocumentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Gets a single document. - - Args: - request (:class:`~.firestore.GetDocumentRequest`): - The request object. The request for - [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.document.Document: - A Firestore document. - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - - request = firestore.GetDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
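``_get_default_mtls_endpoint`` is a pure string transform, so the endpoint selection driven by ``GOOGLE_API_USE_MTLS`` can be checked offline; a small sketch:

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreClient

    assert (
        FirestoreClient._get_default_mtls_endpoint("firestore.googleapis.com")
        == "firestore.mtls.googleapis.com"
    )
    assert (
        FirestoreClient._get_default_mtls_endpoint("firestore.sandbox.googleapis.com")
        == "firestore.mtls.sandbox.googleapis.com"
    )

    # With GOOGLE_API_USE_MTLS unset (or "never"), the constructor picks
    # DEFAULT_ENDPOINT; "always" picks DEFAULT_MTLS_ENDPOINT; "auto" depends on
    # whether client SSL credentials are available.
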
- rpc = gapic_v1.method.wrap_method( - self._transport.get_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_documents( - self, - request: firestore.ListDocumentsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDocumentsPager: - r"""Lists documents. - - Args: - request (:class:`~.firestore.ListDocumentsRequest`): - The request object. The request for - [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.pagers.ListDocumentsPager: - The response for - [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - - request = firestore.ListDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_documents, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDocumentsPager( - method=rpc, request=request, response=response, metadata=metadata, - ) - - # Done; return the response. - return response - - def create_document( - self, - request: firestore.CreateDocumentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> document.Document: - r"""Creates a new document. - - Args: - request (:class:`~.firestore.CreateDocumentRequest`): - The request object. The request for - [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.document.Document: - A Firestore document. - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - - request = firestore.CreateDocumentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.create_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def update_document( - self, - request: firestore.UpdateDocumentRequest = None, - *, - document: gf_document.Document = None, - update_mask: common.DocumentMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gf_document.Document: - r"""Updates or inserts a document. - - Args: - request (:class:`~.firestore.UpdateDocumentRequest`): - The request object. The request for - [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument]. - document (:class:`~.gf_document.Document`): - Required. The updated document. - Creates the document if it does not - already exist. - This corresponds to the ``document`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`~.common.DocumentMask`): - The fields to update. - None of the field paths in the mask may - contain a reserved name. - If the document exists on the server and - has fields not referenced in the mask, - they are left unchanged. - Fields referenced in the mask, but not - present in the input document, are - deleted from the document on the server. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.gf_document.Document: - A Firestore document. - Must not exceed 1 MiB - 4 bytes. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([document, update_mask]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.UpdateDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if document is not None: - request.document = document - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("document.name", request.document.name),) - ), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_document( - self, - request: firestore.DeleteDocumentRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a document. - - Args: - request (:class:`~.firestore.DeleteDocumentRequest`): - The request object. 
The request for - [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument]. - name (:class:`str`): - Required. The resource name of the Document to delete. - In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([name]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.DeleteDocumentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_document, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - def batch_get_documents( - self, - request: firestore.BatchGetDocumentsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.BatchGetDocumentsResponse]: - r"""Gets multiple documents. - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Args: - request (:class:`~.firestore.BatchGetDocumentsRequest`): - The request object. The request for - [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[~.firestore.BatchGetDocumentsResponse]: - The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. - - """ - # Create or coerce a protobuf request object. - - request = firestore.BatchGetDocumentsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.batch_get_documents, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def begin_transaction( - self, - request: firestore.BeginTransactionRequest = None, - *, - database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.BeginTransactionResponse: - r"""Starts a new transaction. - - Args: - request (:class:`~.firestore.BeginTransactionRequest`): - The request object. The request for - [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.BeginTransactionResponse: - The response for - [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([database]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.begin_transaction, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def commit( - self, - request: firestore.CommitRequest = None, - *, - database: str = None, - writes: Sequence[gf_write.Write] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.CommitResponse: - r"""Commits a transaction, while optionally updating - documents. - - Args: - request (:class:`~.firestore.CommitRequest`): - The request object. The request for - [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - writes (:class:`Sequence[~.gf_write.Write]`): - The writes to apply. - Always executed atomically and in order. - This corresponds to the ``writes`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.CommitResponse: - The response for - [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([database, writes]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if database is not None: - request.database = database - if writes is not None: - request.writes = writes - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.commit, default_timeout=None, client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def rollback( - self, - request: firestore.RollbackRequest = None, - *, - database: str = None, - transaction: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Rolls back a transaction. - - Args: - request (:class:`~.firestore.RollbackRequest`): - The request object. The request for - [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback]. - database (:class:`str`): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction (:class:`bytes`): - Required. The transaction to roll - back. - This corresponds to the ``transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([database, transaction]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if database is not None: - request.database = database - if transaction is not None: - request.transaction = transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.rollback, default_timeout=None, client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
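The routing-header helper used in each of these blocks packs the named request fields into the x-goog-request-params metadata entry that the service uses for request routing. A small sketch, assuming google.api_core's routing_header module behaves as in current releases:

    from google.api_core import gapic_v1

    database = "projects/my-project/databases/(default)"  # placeholder value
    routing_md = gapic_v1.routing_header.to_grpc_metadata((("database", database),))
    # routing_md is a single ("x-goog-request-params", "database=<url-encoded value>")
    # pair, which the methods above append to any caller-supplied metadata.
    print(routing_md)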
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("database", request.database),)), - ) - - # Send the request. - rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - def run_query( - self, - request: firestore.RunQueryRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.RunQueryResponse]: - r"""Runs a query. - - Args: - request (:class:`~.firestore.RunQueryRequest`): - The request object. The request for - [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[~.firestore.RunQueryResponse]: - The response for - [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. - - """ - # Create or coerce a protobuf request object. - - request = firestore.RunQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.run_query, default_timeout=None, client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def write( - self, - requests: Iterator[firestore.WriteRequest] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.WriteResponse]: - r"""Streams batches of document updates and deletes, in - order. - - Args: - requests (Iterator[`~.firestore.WriteRequest`]): - The request object iterator. The request for - [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. - The first request creates a stream, or resumes an - existing one from a token. - When creating a new stream, the server replies with a - response containing only an ID and a token, to use in - the next request. - - When resuming a stream, the server first streams any - responses later than the given token, then a response - containing only an up-to-date token, to use in the next - request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[~.firestore.WriteResponse]: - The response for - [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.write, default_timeout=None, client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def listen( - self, - requests: Iterator[firestore.ListenRequest] = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[firestore.ListenResponse]: - r"""Listens to changes. - - Args: - requests (Iterator[`~.firestore.ListenRequest`]): - The request object iterator. A request for - [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[~.firestore.ListenResponse]: - The response for - [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.listen, default_timeout=None, client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(()),) - - # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_collection_ids( - self, - request: firestore.ListCollectionIdsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> firestore.ListCollectionIdsResponse: - r"""Lists all the collection IDs underneath a document. - - Args: - request (:class:`~.firestore.ListCollectionIdsRequest`): - The request object. The request for - [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. - parent (:class:`str`): - Required. The parent document. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.firestore.ListCollectionIdsResponse: - The response from - [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = firestore.ListCollectionIdsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.list_collection_ids, - default_timeout=None, - client_info=_client_info, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - -try: - _client_info = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-firestore",).version, - ) -except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() - - -__all__ = ("FirestoreClient",) diff --git a/google/cloud/firestore_v1beta1/services/firestore/pagers.py b/google/cloud/firestore_v1beta1/services/firestore/pagers.py deleted file mode 100644 index 544607290..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/pagers.py +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple - -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import firestore - - -class ListDocumentsPager: - """A pager for iterating through ``list_documents`` requests. - - This class thinly wraps an initial - :class:`~.firestore.ListDocumentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``documents`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDocuments`` requests and continue to iterate - through the ``documents`` field on the - corresponding responses. - - All the usual :class:`~.firestore.ListDocumentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., firestore.ListDocumentsResponse], - request: firestore.ListDocumentsRequest, - response: firestore.ListDocumentsResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (:class:`~.firestore.ListDocumentsRequest`): - The initial request object. - response (:class:`~.firestore.ListDocumentsResponse`): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
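Callers normally never instantiate this pager directly: list_documents returns one, and iterating it lazily issues follow-up requests whenever the previous response carries a next_page_token. A consumption sketch, assuming the FirestoreClient re-export, application default credentials, and a placeholder parent path:

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreClient
    from google.cloud.firestore_v1beta1.types import firestore

    client = FirestoreClient()  # assumes application default credentials
    parent = "projects/my-project/databases/(default)/documents"  # placeholder
    request = firestore.ListDocumentsRequest(parent=parent, collection_id="users")

    for doc in client.list_documents(request=request):
        # The returned ListDocumentsPager transparently fetches subsequent pages.
        print(doc.name)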
- """ - self._method = method - self._request = firestore.ListDocumentsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[firestore.ListDocumentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[document.Document]: - for page in self.pages: - yield from page.documents - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListDocumentsAsyncPager: - """A pager for iterating through ``list_documents`` requests. - - This class thinly wraps an initial - :class:`~.firestore.ListDocumentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``documents`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDocuments`` requests and continue to iterate - through the ``documents`` field on the - corresponding responses. - - All the usual :class:`~.firestore.ListDocumentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[firestore.ListDocumentsResponse]], - request: firestore.ListDocumentsRequest, - response: firestore.ListDocumentsResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (:class:`~.firestore.ListDocumentsRequest`): - The initial request object. - response (:class:`~.firestore.ListDocumentsResponse`): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = firestore.ListDocumentsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[firestore.ListDocumentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[document.Document]: - async def async_generator(): - async for page in self.pages: - for response in page.documents: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/firestore_v1beta1/services/firestore/transports/__init__.py b/google/cloud/firestore_v1beta1/services/firestore/transports/__init__.py deleted file mode 100644 index ce6aa3a9d..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/transports/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from collections import OrderedDict -from typing import Dict, Type - -from .base import FirestoreTransport -from .grpc import FirestoreGrpcTransport -from .grpc_asyncio import FirestoreGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[FirestoreTransport]] -_transport_registry["grpc"] = FirestoreGrpcTransport -_transport_registry["grpc_asyncio"] = FirestoreGrpcAsyncIOTransport - - -__all__ = ( - "FirestoreTransport", - "FirestoreGrpcTransport", - "FirestoreGrpcAsyncIOTransport", -) diff --git a/google/cloud/firestore_v1beta1/services/firestore/transports/base.py b/google/cloud/firestore_v1beta1/services/firestore/transports/base.py deleted file mode 100644 index b2c5e3cbf..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/transports/base.py +++ /dev/null @@ -1,222 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import abc -import typing - -from google import auth -from google.api_core import exceptions # type: ignore -from google.auth import credentials # type: ignore - -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.cloud.firestore_v1beta1.types import firestore -from google.protobuf import empty_pb2 as empty # type: ignore - - -class FirestoreTransport(abc.ABC): - """Abstract transport class for Firestore.""" - - AUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
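The registry above is what allows a transport to be selected by name; generated clients typically accept either the registered label or a pre-built transport instance. A hedged sketch of both styles, assuming the constructor signatures shown elsewhere in this surface:

    from google.cloud.firestore_v1beta1.services.firestore import FirestoreClient
    from google.cloud.firestore_v1beta1.services.firestore.transports import (
        FirestoreGrpcTransport,
    )

    # Resolve the transport by its registered label...
    client = FirestoreClient(transport="grpc")

    # ...or construct one explicitly and hand it to the client.
    transport = FirestoreGrpcTransport(host="firestore.googleapis.com")
    client = FirestoreClient(transport=transport)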
- if ":" not in host: - host += ":443" - self._host = host - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes - ) - elif credentials is None: - credentials, _ = auth.default(scopes=scopes) - - # Save the credentials. - self._credentials = credentials - - @property - def get_document( - self, - ) -> typing.Callable[ - [firestore.GetDocumentRequest], - typing.Union[document.Document, typing.Awaitable[document.Document]], - ]: - raise NotImplementedError() - - @property - def list_documents( - self, - ) -> typing.Callable[ - [firestore.ListDocumentsRequest], - typing.Union[ - firestore.ListDocumentsResponse, - typing.Awaitable[firestore.ListDocumentsResponse], - ], - ]: - raise NotImplementedError() - - @property - def create_document( - self, - ) -> typing.Callable[ - [firestore.CreateDocumentRequest], - typing.Union[document.Document, typing.Awaitable[document.Document]], - ]: - raise NotImplementedError() - - @property - def update_document( - self, - ) -> typing.Callable[ - [firestore.UpdateDocumentRequest], - typing.Union[gf_document.Document, typing.Awaitable[gf_document.Document]], - ]: - raise NotImplementedError() - - @property - def delete_document( - self, - ) -> typing.Callable[ - [firestore.DeleteDocumentRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], - ]: - raise NotImplementedError() - - @property - def batch_get_documents( - self, - ) -> typing.Callable[ - [firestore.BatchGetDocumentsRequest], - typing.Union[ - firestore.BatchGetDocumentsResponse, - typing.Awaitable[firestore.BatchGetDocumentsResponse], - ], - ]: - raise NotImplementedError() - - @property - def begin_transaction( - self, - ) -> typing.Callable[ - [firestore.BeginTransactionRequest], - typing.Union[ - firestore.BeginTransactionResponse, - typing.Awaitable[firestore.BeginTransactionResponse], - ], - ]: - raise NotImplementedError() - - @property - def commit( - self, - ) -> typing.Callable[ - [firestore.CommitRequest], - typing.Union[ - firestore.CommitResponse, typing.Awaitable[firestore.CommitResponse] - ], - ]: - raise NotImplementedError() - - @property - def rollback( - self, - ) -> typing.Callable[ - [firestore.RollbackRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], - ]: - raise NotImplementedError() - - @property - def run_query( - self, - ) -> typing.Callable[ - [firestore.RunQueryRequest], - typing.Union[ - firestore.RunQueryResponse, typing.Awaitable[firestore.RunQueryResponse] - ], - ]: - raise NotImplementedError() - - @property - def write( - self, - ) -> typing.Callable[ - [firestore.WriteRequest], - typing.Union[ - firestore.WriteResponse, typing.Awaitable[firestore.WriteResponse] - ], - ]: - raise NotImplementedError() - - @property - def listen( - self, - ) -> typing.Callable[ - [firestore.ListenRequest], - typing.Union[ - firestore.ListenResponse, typing.Awaitable[firestore.ListenResponse] - ], - ]: - raise NotImplementedError() - - @property - def list_collection_ids( - self, - ) -> typing.Callable[ - [firestore.ListCollectionIdsRequest], - typing.Union[ - firestore.ListCollectionIdsResponse, - typing.Awaitable[firestore.ListCollectionIdsResponse], - ], - ]: - raise NotImplementedError() - - -__all__ = ("FirestoreTransport",) diff --git 
a/google/cloud/firestore_v1beta1/services/firestore/transports/grpc.py b/google/cloud/firestore_v1beta1/services/firestore/transports/grpc.py deleted file mode 100644 index 8f9a29f27..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/transports/grpc.py +++ /dev/null @@ -1,555 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from typing import Callable, Dict, Optional, Sequence, Tuple - -from google.api_core import grpc_helpers # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - - -import grpc # type: ignore - -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.cloud.firestore_v1beta1.types import firestore -from google.protobuf import empty_pb2 as empty # type: ignore - -from .base import FirestoreTransport - - -class FirestoreGrpcTransport(FirestoreTransport): - """gRPC backend transport for Firestore. - - The Cloud Firestore service. - - This service exposes several types of comparable timestamps: - - - ``create_time`` - The time at which a document was created. - Changes only when a document is deleted, then re-created. - Increases in a strict monotonic fashion. - - ``update_time`` - The time at which a document was last updated. - Changes every time a document is modified. Does not change when a - write results in no modifications. Increases in a strict - monotonic fashion. - - ``read_time`` - The time at which a particular state was - observed. Used to denote a consistent snapshot of the database or - the time at which a Document was observed to not exist. - - ``commit_time`` - The time at which the writes in a transaction - were committed. Any read with an equal or greater ``read_time`` - is guaranteed to see the effects of the transaction. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _stubs: Dict[str, Callable] - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. - credentials = False - - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - ) - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - ) - - self._stubs = {} # type: Dict[str, Callable] - - @classmethod - def create_channel( - cls, - host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - **kwargs - ) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - address (Optionsl[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. 
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - scopes = scopes or cls.AUTH_SCOPES - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. - return self._grpc_channel - - @property - def get_document( - self, - ) -> Callable[[firestore.GetDocumentRequest], document.Document]: - r"""Return a callable for the get document method over gRPC. - - Gets a single document. - - Returns: - Callable[[~.GetDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_document" not in self._stubs: - self._stubs["get_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/GetDocument", - request_serializer=firestore.GetDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["get_document"] - - @property - def list_documents( - self, - ) -> Callable[[firestore.ListDocumentsRequest], firestore.ListDocumentsResponse]: - r"""Return a callable for the list documents method over gRPC. - - Lists documents. - - Returns: - Callable[[~.ListDocumentsRequest], - ~.ListDocumentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_documents" not in self._stubs: - self._stubs["list_documents"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/ListDocuments", - request_serializer=firestore.ListDocumentsRequest.serialize, - response_deserializer=firestore.ListDocumentsResponse.deserialize, - ) - return self._stubs["list_documents"] - - @property - def create_document( - self, - ) -> Callable[[firestore.CreateDocumentRequest], document.Document]: - r"""Return a callable for the create document method over gRPC. - - Creates a new document. - - Returns: - Callable[[~.CreateDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
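Every stub property below repeats one lazy-caching idiom: build the gRPC callable once for the fully-qualified method path, then reuse it on later accesses. A generic sketch of that idiom (the function name and parameters here are illustrative, not part of the original module):

    def cached_unary_unary(stubs, channel, name, path, request_type, response_type):
        # First access creates the stub and caches it; later accesses reuse it.
        if name not in stubs:
            stubs[name] = channel.unary_unary(
                path,
                request_serializer=request_type.serialize,
                response_deserializer=response_type.deserialize,
            )
        return stubs[name]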
- if "create_document" not in self._stubs: - self._stubs["create_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/CreateDocument", - request_serializer=firestore.CreateDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["create_document"] - - @property - def update_document( - self, - ) -> Callable[[firestore.UpdateDocumentRequest], gf_document.Document]: - r"""Return a callable for the update document method over gRPC. - - Updates or inserts a document. - - Returns: - Callable[[~.UpdateDocumentRequest], - ~.Document]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "update_document" not in self._stubs: - self._stubs["update_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/UpdateDocument", - request_serializer=firestore.UpdateDocumentRequest.serialize, - response_deserializer=gf_document.Document.deserialize, - ) - return self._stubs["update_document"] - - @property - def delete_document( - self, - ) -> Callable[[firestore.DeleteDocumentRequest], empty.Empty]: - r"""Return a callable for the delete document method over gRPC. - - Deletes a document. - - Returns: - Callable[[~.DeleteDocumentRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_document" not in self._stubs: - self._stubs["delete_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/DeleteDocument", - request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty.Empty.FromString, - ) - return self._stubs["delete_document"] - - @property - def batch_get_documents( - self, - ) -> Callable[ - [firestore.BatchGetDocumentsRequest], firestore.BatchGetDocumentsResponse - ]: - r"""Return a callable for the batch get documents method over gRPC. - - Gets multiple documents. - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Returns: - Callable[[~.BatchGetDocumentsRequest], - ~.BatchGetDocumentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "batch_get_documents" not in self._stubs: - self._stubs["batch_get_documents"] = self.grpc_channel.unary_stream( - "/google.firestore.v1beta1.Firestore/BatchGetDocuments", - request_serializer=firestore.BatchGetDocumentsRequest.serialize, - response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, - ) - return self._stubs["batch_get_documents"] - - @property - def begin_transaction( - self, - ) -> Callable[ - [firestore.BeginTransactionRequest], firestore.BeginTransactionResponse - ]: - r"""Return a callable for the begin transaction method over gRPC. - - Starts a new transaction. 
- - Returns: - Callable[[~.BeginTransactionRequest], - ~.BeginTransactionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/BeginTransaction", - request_serializer=firestore.BeginTransactionRequest.serialize, - response_deserializer=firestore.BeginTransactionResponse.deserialize, - ) - return self._stubs["begin_transaction"] - - @property - def commit(self) -> Callable[[firestore.CommitRequest], firestore.CommitResponse]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, while optionally updating - documents. - - Returns: - Callable[[~.CommitRequest], - ~.CommitResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/Commit", - request_serializer=firestore.CommitRequest.serialize, - response_deserializer=firestore.CommitResponse.deserialize, - ) - return self._stubs["commit"] - - @property - def rollback(self) -> Callable[[firestore.RollbackRequest], empty.Empty]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/Rollback", - request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty.Empty.FromString, - ) - return self._stubs["rollback"] - - @property - def run_query( - self, - ) -> Callable[[firestore.RunQueryRequest], firestore.RunQueryResponse]: - r"""Return a callable for the run query method over gRPC. - - Runs a query. - - Returns: - Callable[[~.RunQueryRequest], - ~.RunQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_stream( - "/google.firestore.v1beta1.Firestore/RunQuery", - request_serializer=firestore.RunQueryRequest.serialize, - response_deserializer=firestore.RunQueryResponse.deserialize, - ) - return self._stubs["run_query"] - - @property - def write(self) -> Callable[[firestore.WriteRequest], firestore.WriteResponse]: - r"""Return a callable for the write method over gRPC. - - Streams batches of document updates and deletes, in - order. 
- - Returns: - Callable[[~.WriteRequest], - ~.WriteResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "write" not in self._stubs: - self._stubs["write"] = self.grpc_channel.stream_stream( - "/google.firestore.v1beta1.Firestore/Write", - request_serializer=firestore.WriteRequest.serialize, - response_deserializer=firestore.WriteResponse.deserialize, - ) - return self._stubs["write"] - - @property - def listen(self) -> Callable[[firestore.ListenRequest], firestore.ListenResponse]: - r"""Return a callable for the listen method over gRPC. - - Listens to changes. - - Returns: - Callable[[~.ListenRequest], - ~.ListenResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "listen" not in self._stubs: - self._stubs["listen"] = self.grpc_channel.stream_stream( - "/google.firestore.v1beta1.Firestore/Listen", - request_serializer=firestore.ListenRequest.serialize, - response_deserializer=firestore.ListenResponse.deserialize, - ) - return self._stubs["listen"] - - @property - def list_collection_ids( - self, - ) -> Callable[ - [firestore.ListCollectionIdsRequest], firestore.ListCollectionIdsResponse - ]: - r"""Return a callable for the list collection ids method over gRPC. - - Lists all the collection IDs underneath a document. - - Returns: - Callable[[~.ListCollectionIdsRequest], - ~.ListCollectionIdsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_collection_ids" not in self._stubs: - self._stubs["list_collection_ids"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/ListCollectionIds", - request_serializer=firestore.ListCollectionIdsRequest.serialize, - response_deserializer=firestore.ListCollectionIdsResponse.deserialize, - ) - return self._stubs["list_collection_ids"] - - -__all__ = ("FirestoreGrpcTransport",) diff --git a/google/cloud/firestore_v1beta1/services/firestore/transports/grpc_asyncio.py b/google/cloud/firestore_v1beta1/services/firestore/transports/grpc_asyncio.py deleted file mode 100644 index d9ed6ebe5..000000000 --- a/google/cloud/firestore_v1beta1/services/firestore/transports/grpc_asyncio.py +++ /dev/null @@ -1,561 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple - -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.cloud.firestore_v1beta1.types import firestore -from google.protobuf import empty_pb2 as empty # type: ignore - -from .base import FirestoreTransport -from .grpc import FirestoreGrpcTransport - - -class FirestoreGrpcAsyncIOTransport(FirestoreTransport): - """gRPC AsyncIO backend transport for Firestore. - - The Cloud Firestore service. - - This service exposes several types of comparable timestamps: - - - ``create_time`` - The time at which a document was created. - Changes only when a document is deleted, then re-created. - Increases in a strict monotonic fashion. - - ``update_time`` - The time at which a document was last updated. - Changes every time a document is modified. Does not change when a - write results in no modifications. Increases in a strict - monotonic fashion. - - ``read_time`` - The time at which a particular state was - observed. Used to denote a consistent snapshot of the database or - the time at which a Document was observed to not exist. - - ``commit_time`` - The time at which the writes in a transaction - were committed. Any read with an equal or greater ``read_time`` - is guaranteed to see the effects of the transaction. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel( - cls, - host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - **kwargs - ) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - address (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - scopes = scopes or cls.AUTH_SCOPES - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - **kwargs - ) - - def __init__( - self, - *, - host: str = "firestore.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. - credentials = False - - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - ) - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - ) - - self._stubs = {} - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Sanity check: Only create a new channel if we do not already - # have one. 
- if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. - return self._grpc_channel - - @property - def get_document( - self, - ) -> Callable[[firestore.GetDocumentRequest], Awaitable[document.Document]]: - r"""Return a callable for the get document method over gRPC. - - Gets a single document. - - Returns: - Callable[[~.GetDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_document" not in self._stubs: - self._stubs["get_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/GetDocument", - request_serializer=firestore.GetDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["get_document"] - - @property - def list_documents( - self, - ) -> Callable[ - [firestore.ListDocumentsRequest], Awaitable[firestore.ListDocumentsResponse] - ]: - r"""Return a callable for the list documents method over gRPC. - - Lists documents. - - Returns: - Callable[[~.ListDocumentsRequest], - Awaitable[~.ListDocumentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_documents" not in self._stubs: - self._stubs["list_documents"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/ListDocuments", - request_serializer=firestore.ListDocumentsRequest.serialize, - response_deserializer=firestore.ListDocumentsResponse.deserialize, - ) - return self._stubs["list_documents"] - - @property - def create_document( - self, - ) -> Callable[[firestore.CreateDocumentRequest], Awaitable[document.Document]]: - r"""Return a callable for the create document method over gRPC. - - Creates a new document. - - Returns: - Callable[[~.CreateDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_document" not in self._stubs: - self._stubs["create_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/CreateDocument", - request_serializer=firestore.CreateDocumentRequest.serialize, - response_deserializer=document.Document.deserialize, - ) - return self._stubs["create_document"] - - @property - def update_document( - self, - ) -> Callable[[firestore.UpdateDocumentRequest], Awaitable[gf_document.Document]]: - r"""Return a callable for the update document method over gRPC. - - Updates or inserts a document. - - Returns: - Callable[[~.UpdateDocumentRequest], - Awaitable[~.Document]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "update_document" not in self._stubs: - self._stubs["update_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/UpdateDocument", - request_serializer=firestore.UpdateDocumentRequest.serialize, - response_deserializer=gf_document.Document.deserialize, - ) - return self._stubs["update_document"] - - @property - def delete_document( - self, - ) -> Callable[[firestore.DeleteDocumentRequest], Awaitable[empty.Empty]]: - r"""Return a callable for the delete document method over gRPC. - - Deletes a document. - - Returns: - Callable[[~.DeleteDocumentRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_document" not in self._stubs: - self._stubs["delete_document"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/DeleteDocument", - request_serializer=firestore.DeleteDocumentRequest.serialize, - response_deserializer=empty.Empty.FromString, - ) - return self._stubs["delete_document"] - - @property - def batch_get_documents( - self, - ) -> Callable[ - [firestore.BatchGetDocumentsRequest], - Awaitable[firestore.BatchGetDocumentsResponse], - ]: - r"""Return a callable for the batch get documents method over gRPC. - - Gets multiple documents. - Documents returned by this method are not guaranteed to - be returned in the same order that they were requested. - - Returns: - Callable[[~.BatchGetDocumentsRequest], - Awaitable[~.BatchGetDocumentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "batch_get_documents" not in self._stubs: - self._stubs["batch_get_documents"] = self.grpc_channel.unary_stream( - "/google.firestore.v1beta1.Firestore/BatchGetDocuments", - request_serializer=firestore.BatchGetDocumentsRequest.serialize, - response_deserializer=firestore.BatchGetDocumentsResponse.deserialize, - ) - return self._stubs["batch_get_documents"] - - @property - def begin_transaction( - self, - ) -> Callable[ - [firestore.BeginTransactionRequest], - Awaitable[firestore.BeginTransactionResponse], - ]: - r"""Return a callable for the begin transaction method over gRPC. - - Starts a new transaction. - - Returns: - Callable[[~.BeginTransactionRequest], - Awaitable[~.BeginTransactionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "begin_transaction" not in self._stubs: - self._stubs["begin_transaction"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/BeginTransaction", - request_serializer=firestore.BeginTransactionRequest.serialize, - response_deserializer=firestore.BeginTransactionResponse.deserialize, - ) - return self._stubs["begin_transaction"] - - @property - def commit( - self, - ) -> Callable[[firestore.CommitRequest], Awaitable[firestore.CommitResponse]]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction, while optionally updating - documents. 
- - Returns: - Callable[[~.CommitRequest], - Awaitable[~.CommitResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "commit" not in self._stubs: - self._stubs["commit"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/Commit", - request_serializer=firestore.CommitRequest.serialize, - response_deserializer=firestore.CommitResponse.deserialize, - ) - return self._stubs["commit"] - - @property - def rollback(self) -> Callable[[firestore.RollbackRequest], Awaitable[empty.Empty]]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction. - - Returns: - Callable[[~.RollbackRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "rollback" not in self._stubs: - self._stubs["rollback"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/Rollback", - request_serializer=firestore.RollbackRequest.serialize, - response_deserializer=empty.Empty.FromString, - ) - return self._stubs["rollback"] - - @property - def run_query( - self, - ) -> Callable[[firestore.RunQueryRequest], Awaitable[firestore.RunQueryResponse]]: - r"""Return a callable for the run query method over gRPC. - - Runs a query. - - Returns: - Callable[[~.RunQueryRequest], - Awaitable[~.RunQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "run_query" not in self._stubs: - self._stubs["run_query"] = self.grpc_channel.unary_stream( - "/google.firestore.v1beta1.Firestore/RunQuery", - request_serializer=firestore.RunQueryRequest.serialize, - response_deserializer=firestore.RunQueryResponse.deserialize, - ) - return self._stubs["run_query"] - - @property - def write( - self, - ) -> Callable[[firestore.WriteRequest], Awaitable[firestore.WriteResponse]]: - r"""Return a callable for the write method over gRPC. - - Streams batches of document updates and deletes, in - order. - - Returns: - Callable[[~.WriteRequest], - Awaitable[~.WriteResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "write" not in self._stubs: - self._stubs["write"] = self.grpc_channel.stream_stream( - "/google.firestore.v1beta1.Firestore/Write", - request_serializer=firestore.WriteRequest.serialize, - response_deserializer=firestore.WriteResponse.deserialize, - ) - return self._stubs["write"] - - @property - def listen( - self, - ) -> Callable[[firestore.ListenRequest], Awaitable[firestore.ListenResponse]]: - r"""Return a callable for the listen method over gRPC. - - Listens to changes. - - Returns: - Callable[[~.ListenRequest], - Awaitable[~.ListenResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "listen" not in self._stubs: - self._stubs["listen"] = self.grpc_channel.stream_stream( - "/google.firestore.v1beta1.Firestore/Listen", - request_serializer=firestore.ListenRequest.serialize, - response_deserializer=firestore.ListenResponse.deserialize, - ) - return self._stubs["listen"] - - @property - def list_collection_ids( - self, - ) -> Callable[ - [firestore.ListCollectionIdsRequest], - Awaitable[firestore.ListCollectionIdsResponse], - ]: - r"""Return a callable for the list collection ids method over gRPC. - - Lists all the collection IDs underneath a document. - - Returns: - Callable[[~.ListCollectionIdsRequest], - Awaitable[~.ListCollectionIdsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_collection_ids" not in self._stubs: - self._stubs["list_collection_ids"] = self.grpc_channel.unary_unary( - "/google.firestore.v1beta1.Firestore/ListCollectionIds", - request_serializer=firestore.ListCollectionIdsRequest.serialize, - response_deserializer=firestore.ListCollectionIdsResponse.deserialize, - ) - return self._stubs["list_collection_ids"] - - -__all__ = ("FirestoreGrpcAsyncIOTransport",) diff --git a/google/cloud/firestore_v1beta1/transaction.py b/google/cloud/firestore_v1beta1/transaction.py deleted file mode 100644 index 7236119eb..000000000 --- a/google/cloud/firestore_v1beta1/transaction.py +++ /dev/null @@ -1,415 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helpers for applying Google Cloud Firestore changes in a transaction.""" - - -import random -import time - -import six - -from google.api_core import exceptions -from google.cloud.firestore_v1beta1 import batch -from google.cloud.firestore_v1beta1 import types - - -MAX_ATTEMPTS = 5 -"""int: Default number of transaction attempts (with retries).""" -_CANT_BEGIN = "The transaction has already begun. Current transaction ID: {!r}." -_MISSING_ID_TEMPLATE = "The transaction has no transaction ID, so it cannot be {}." -_CANT_ROLLBACK = _MISSING_ID_TEMPLATE.format("rolled back") -_CANT_COMMIT = _MISSING_ID_TEMPLATE.format("committed") -_WRITE_READ_ONLY = "Cannot perform write operation in read-only transaction." -_INITIAL_SLEEP = 1.0 -"""float: Initial "max" for sleep interval. To be used in :func:`_sleep`.""" -_MAX_SLEEP = 30.0 -"""float: Eventual "max" sleep time. To be used in :func:`_sleep`.""" -_MULTIPLIER = 2.0 -"""float: Multiplier for exponential backoff. To be used in :func:`_sleep`.""" -_EXCEED_ATTEMPTS_TEMPLATE = "Failed to commit transaction in {:d} attempts." 
-_CANT_RETRY_READ_ONLY = "Only read-write transactions can be retried." - - -class Transaction(batch.WriteBatch): - """Accumulate read-and-write operations to be sent in a transaction. - - Args: - client (~.firestore_v1beta1.client.Client): The client that - created this transaction. - max_attempts (Optional[int]): The maximum number of attempts for - the transaction (i.e. allowing retries). Defaults to - :attr:`~google.cloud.firestore_v1beta1.transaction.MAX_ATTEMPTS`. - read_only (Optional[bool]): Flag indicating if the transaction - should be read-only or should allow writes. Defaults to - :data:`False`. - """ - - def __init__(self, client, max_attempts=MAX_ATTEMPTS, read_only=False): - super(Transaction, self).__init__(client) - self._max_attempts = max_attempts - self._read_only = read_only - self._id = None - - def _add_write_pbs(self, write_pbs): - """Add `Write`` protobufs to this transaction. - - Args: - write_pbs (List[google.cloud.proto.firestore.v1beta1.\ - write.Write]): A list of write protobufs to be added. - - Raises: - ValueError: If this transaction is read-only. - """ - if self._read_only: - raise ValueError(_WRITE_READ_ONLY) - - super(Transaction, self)._add_write_pbs(write_pbs) - - def _options_protobuf(self, retry_id): - """Convert the current object to protobuf. - - The ``retry_id`` value is used when retrying a transaction that - failed (e.g. due to contention). It is intended to be the "first" - transaction that failed (i.e. if multiple retries are needed). - - Args: - retry_id (Union[bytes, NoneType]): Transaction ID of a transaction - to be retried. - - Returns: - Optional[google.cloud.firestore_v1beta1.types.TransactionOptions]: - The protobuf ``TransactionOptions`` if ``read_only==True`` or if - there is a transaction ID to be retried, else :data:`None`. - - Raises: - ValueError: If ``retry_id`` is not :data:`None` but the - transaction is read-only. - """ - if retry_id is not None: - if self._read_only: - raise ValueError(_CANT_RETRY_READ_ONLY) - - return types.TransactionOptions( - read_write=types.TransactionOptions.ReadWrite( - retry_transaction=retry_id - ) - ) - elif self._read_only: - return types.TransactionOptions( - read_only=types.TransactionOptions.ReadOnly() - ) - else: - return None - - @property - def in_progress(self): - """Determine if this transaction has already begun. - - Returns: - bool: Indicates if the transaction has started. - """ - return self._id is not None - - @property - def id(self): - """Get the current transaction ID. - - Returns: - Optional[bytes]: The transaction ID (or :data:`None` if the - current transaction is not in progress). - """ - return self._id - - def _begin(self, retry_id=None): - """Begin the transaction. - - Args: - retry_id (Optional[bytes]): Transaction ID of a transaction to be - retried. - - Raises: - ValueError: If the current transaction has already begun. - """ - if self.in_progress: - msg = _CANT_BEGIN.format(self._id) - raise ValueError(msg) - - transaction_response = self._client._firestore_api.begin_transaction( - request={ - "database": self._client._database_string, - "options": self._options_protobuf(retry_id), - }, - metadata=self._client._rpc_metadata, - ) - self._id = transaction_response.transaction - - def _clean_up(self): - """Clean up the instance after :meth:`_rollback`` or :meth:`_commit``. - - This intended to occur on success or failure of the associated RPCs. - """ - self._write_pbs = [] - self._id = None - - def _rollback(self): - """Roll back the transaction. 
- - Raises: - ValueError: If no transaction is in progress. - """ - if not self.in_progress: - raise ValueError(_CANT_ROLLBACK) - - try: - # NOTE: The response is just ``google.protobuf.Empty``. - self._client._firestore_api.rollback( - request={ - "database": self._client._database_string, - "transaction": self._id, - }, - metadata=self._client._rpc_metadata, - ) - finally: - self._clean_up() - - def _commit(self): - """Transactionally commit the changes accumulated. - - Returns: - List[google.cloud.proto.firestore.v1beta1.\ - write.WriteResult, ...]: The write results corresponding - to the changes committed, returned in the same order as the - changes were applied to this transaction. A write result contains - an ``update_time`` field. - - Raises: - ValueError: If no transaction is in progress. - """ - if not self.in_progress: - raise ValueError(_CANT_COMMIT) - - commit_response = _commit_with_retry(self._client, self._write_pbs, self._id) - - self._clean_up() - return list(commit_response.write_results) - - -class _Transactional(object): - """Provide a callable object to use as a transactional decorater. - - This is surfaced via - :func:`~google.cloud.firestore_v1beta1.transaction.transactional`. - - Args: - to_wrap (Callable[~.firestore_v1beta1.transaction.Transaction, \ - Any]): A callable that should be run (and retried) in a - transaction. - """ - - def __init__(self, to_wrap): - self.to_wrap = to_wrap - self.current_id = None - """Optional[bytes]: The current transaction ID.""" - self.retry_id = None - """Optional[bytes]: The ID of the first attempted transaction.""" - - def _reset(self): - """Unset the transaction IDs.""" - self.current_id = None - self.retry_id = None - - def _pre_commit(self, transaction, *args, **kwargs): - """Begin transaction and call the wrapped callable. - - If the callable raises an exception, the transaction will be rolled - back. If not, the transaction will be "ready" for ``Commit`` (i.e. - it will have staged writes). - - Args: - transaction (~.firestore_v1beta1.transaction.Transaction): A - transaction to execute the callable within. - args (Tuple[Any, ...]): The extra positional arguments to pass - along to the wrapped callable. - kwargs (Dict[str, Any]): The extra keyword arguments to pass - along to the wrapped callable. - - Returns: - Any: result of the wrapped callable. - - Raises: - Exception: Any failure caused by ``to_wrap``. - """ - # Force the ``transaction`` to be not "in progress". - transaction._clean_up() - transaction._begin(retry_id=self.retry_id) - - # Update the stored transaction IDs. - self.current_id = transaction._id - if self.retry_id is None: - self.retry_id = self.current_id - try: - return self.to_wrap(transaction, *args, **kwargs) - except: # noqa - # NOTE: If ``rollback`` fails this will lose the information - # from the original failure. - transaction._rollback() - raise - - def _maybe_commit(self, transaction): - """Try to commit the transaction. - - If the transaction is read-write and the ``Commit`` fails with the - ``ABORTED`` status code, it will be retried. Any other failure will - not be caught. - - Args: - transaction (~.firestore_v1beta1.transaction.Transaction): The - transaction to be ``Commit``-ed. - - Returns: - bool: Indicating if the commit succeeded. - """ - try: - transaction._commit() - return True - except exceptions.GoogleAPICallError as exc: - if transaction._read_only: - raise - - if isinstance(exc, exceptions.Aborted): - # If a read-write transaction returns ABORTED, retry. 
- return False - else: - raise - - def __call__(self, transaction, *args, **kwargs): - """Execute the wrapped callable within a transaction. - - Args: - transaction (~.firestore_v1beta1.transaction.Transaction): A - transaction to execute the callable within. - args (Tuple[Any, ...]): The extra positional arguments to pass - along to the wrapped callable. - kwargs (Dict[str, Any]): The extra keyword arguments to pass - along to the wrapped callable. - - Returns: - Any: The result of the wrapped callable. - - Raises: - ValueError: If the transaction does not succeed in - ``max_attempts``. - """ - self._reset() - - for attempt in six.moves.xrange(transaction._max_attempts): - result = self._pre_commit(transaction, *args, **kwargs) - succeeded = self._maybe_commit(transaction) - if succeeded: - return result - - # Subsequent requests will use the failed transaction ID as part of - # the ``BeginTransactionRequest`` when restarting this transaction - # (via ``options.retry_transaction``). This preserves the "spot in - # line" of the transaction, so exponential backoff is not required - # in this case. - - transaction._rollback() - msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts) - raise ValueError(msg) - - -def transactional(to_wrap): - """Decorate a callable so that it runs in a transaction. - - Args: - to_wrap (Callable[~.firestore_v1beta1.transaction.Transaction, \ - Any]): A callable that should be run (and retried) in a - transaction. - - Returns: - Callable[~.firestore_v1beta1.transaction.Transaction, Any]: the - wrapped callable. - """ - return _Transactional(to_wrap) - - -def _commit_with_retry(client, write_pbs, transaction_id): - """Call ``Commit`` on the GAPIC client with retry / sleep. - - Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level - retry is handled by the underlying GAPICd client, but in this case it - doesn't because ``Commit`` is not always idempotent. But here we know it - is "idempotent"-like because it has a transaction ID. We also need to do - our own retry to special-case the ``INVALID_ARGUMENT`` error. - - Args: - client (~.firestore_v1beta1.client.Client): A client with - GAPIC client and configuration details. - write_pbs (List[google.cloud.proto.firestore.v1beta1.\ - write.Write, ...]): A ``Write`` protobuf instance to - be committed. - transaction_id (bytes): ID of an existing transaction that - this commit will run in. - - Returns: - google.cloud.firestore_v1beta1.types.CommitResponse: - The protobuf response from ``Commit``. - - Raises: - ~google.api_core.exceptions.GoogleAPICallError: If a non-retryable - exception is encountered. - """ - current_sleep = _INITIAL_SLEEP - while True: - try: - return client._firestore_api.commit( - request={ - "database": client._database_string, - "writes": write_pbs, - "transaction": transaction_id, - }, - metadata=client._rpc_metadata, - ) - except exceptions.ServiceUnavailable: - # Retry - pass - - current_sleep = _sleep(current_sleep) - - -def _sleep(current_sleep, max_sleep=_MAX_SLEEP, multiplier=_MULTIPLIER): - """Sleep and produce a new sleep time. - - .. _Exponential Backoff And Jitter: https://www.awsarchitectureblog.com/\ - 2015/03/backoff.html - - Select a duration between zero and ``current_sleep``. It might seem - counterintuitive to have so much jitter, but - `Exponential Backoff And Jitter`_ argues that "full jitter" is - the best strategy. - - Args: - current_sleep (float): The current "max" for sleep interval. 
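The ``transactional`` decorator and ``_commit_with_retry`` helper above implement the begin / run / commit-with-retry loop. A hedged usage sketch of that decorator pattern, assuming the surviving ``google.cloud.firestore`` surface keeps the same public helpers (the document path and field name are illustrative, and the counter document is assumed to exist):

    from google.cloud import firestore

    client = firestore.Client()
    counter_ref = client.document("counters/page-views")

    @firestore.transactional
    def increment(transaction, doc_ref):
        # Reads go through the transaction so the later commit can be validated.
        snapshot = doc_ref.get(transaction=transaction)
        transaction.update(doc_ref, {"count": snapshot.get("count") + 1})

    # Retries up to the transaction's max_attempts on ABORTED commits.
    increment(client.transaction(), counter_ref)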
- max_sleep (Optional[float]): Eventual "max" sleep time - multiplier (Optional[float]): Multiplier for exponential backoff. - - Returns: - float: Newly doubled ``current_sleep`` or ``max_sleep`` (whichever - is smaller) - """ - actual_sleep = random.uniform(0.0, current_sleep) - time.sleep(actual_sleep) - return min(multiplier * current_sleep, max_sleep) diff --git a/google/cloud/firestore_v1beta1/transforms.py b/google/cloud/firestore_v1beta1/transforms.py deleted file mode 100644 index 4a9a94bfc..000000000 --- a/google/cloud/firestore_v1beta1/transforms.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helpful constants to use for Google Cloud Firestore.""" - - -class Sentinel(object): - """Sentinel objects used to signal special handling.""" - - __slots__ = ("description",) - - def __init__(self, description): - self.description = description - - def __repr__(self): - return "Sentinel: {}".format(self.description) - - -DELETE_FIELD = Sentinel("Value used to delete a field in a document.") - - -SERVER_TIMESTAMP = Sentinel( - "Value used to set a document field to the server timestamp." -) - - -class _ValueList(object): - """Read-only list of values. - - Args: - values (List | Tuple): values held in the helper. - """ - - slots = ("_values",) - - def __init__(self, values): - if not isinstance(values, (list, tuple)): - raise ValueError("'values' must be a list or tuple.") - - if len(values) == 0: - raise ValueError("'values' must be non-empty.") - - self._values = list(values) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return NotImplemented - return self._values == other._values - - @property - def values(self): - """Values to append. - - Returns (List): - values to be appended by the transform. - """ - return self._values - - -class ArrayUnion(_ValueList): - """Field transform: appends missing values to an array field. - - See: - https://cloud.google.com/firestore/docs/reference/rpc/google.cloud.firestore.v1beta1#google.cloud.firestore.v1beta1.DocumentTransform.FieldTransform.FIELDS.google.cloud.firestore.v1beta1.ArrayValue.google.cloud.firestore.v1beta1.DocumentTransform.FieldTransform.append_missing_elements - - Args: - values (List | Tuple): values to append. - """ - - -class ArrayRemove(_ValueList): - """Field transform: remove values from an array field. - - See: - https://cloud.google.com/firestore/docs/reference/rpc/google.cloud.firestore.v1beta1#google.cloud.firestore.v1beta1.DocumentTransform.FieldTransform.FIELDS.google.cloud.firestore.v1beta1.ArrayValue.google.cloud.firestore.v1beta1.DocumentTransform.FieldTransform.remove_all_from_array - - Args: - values (List | Tuple): values to remove. 
- """ diff --git a/google/cloud/firestore_v1beta1/types/__init__.py b/google/cloud/firestore_v1beta1/types/__init__.py deleted file mode 100644 index c43763b71..000000000 --- a/google/cloud/firestore_v1beta1/types/__init__.py +++ /dev/null @@ -1,109 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .common import ( - DocumentMask, - Precondition, - TransactionOptions, -) -from .document import ( - Document, - Value, - ArrayValue, - MapValue, -) -from .write import ( - Write, - DocumentTransform, - WriteResult, - DocumentChange, - DocumentDelete, - DocumentRemove, - ExistenceFilter, -) -from .query import ( - StructuredQuery, - Cursor, -) -from .firestore import ( - GetDocumentRequest, - ListDocumentsRequest, - ListDocumentsResponse, - CreateDocumentRequest, - UpdateDocumentRequest, - DeleteDocumentRequest, - BatchGetDocumentsRequest, - BatchGetDocumentsResponse, - BeginTransactionRequest, - BeginTransactionResponse, - CommitRequest, - CommitResponse, - RollbackRequest, - RunQueryRequest, - RunQueryResponse, - WriteRequest, - WriteResponse, - ListenRequest, - ListenResponse, - Target, - TargetChange, - ListCollectionIdsRequest, - ListCollectionIdsResponse, -) - - -__all__ = ( - "DocumentMask", - "Precondition", - "TransactionOptions", - "Document", - "Value", - "ArrayValue", - "MapValue", - "Write", - "DocumentTransform", - "WriteResult", - "DocumentChange", - "DocumentDelete", - "DocumentRemove", - "ExistenceFilter", - "StructuredQuery", - "Cursor", - "GetDocumentRequest", - "ListDocumentsRequest", - "ListDocumentsResponse", - "CreateDocumentRequest", - "UpdateDocumentRequest", - "DeleteDocumentRequest", - "BatchGetDocumentsRequest", - "BatchGetDocumentsResponse", - "BeginTransactionRequest", - "BeginTransactionResponse", - "CommitRequest", - "CommitResponse", - "RollbackRequest", - "RunQueryRequest", - "RunQueryResponse", - "WriteRequest", - "WriteResponse", - "ListenRequest", - "ListenResponse", - "Target", - "TargetChange", - "ListCollectionIdsRequest", - "ListCollectionIdsResponse", -) diff --git a/google/cloud/firestore_v1beta1/types/common.py b/google/cloud/firestore_v1beta1/types/common.py deleted file mode 100644 index 56bfccccf..000000000 --- a/google/cloud/firestore_v1beta1/types/common.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import proto # type: ignore - - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1beta1", - manifest={"DocumentMask", "Precondition", "TransactionOptions",}, -) - - -class DocumentMask(proto.Message): - r"""A set of field paths on a document. Used to restrict a get or update - operation on a document to a subset of its fields. This is different - from standard field masks, as this is always scoped to a - [Document][google.firestore.v1beta1.Document], and takes in account - the dynamic nature of [Value][google.firestore.v1beta1.Value]. - - Attributes: - field_paths (Sequence[str]): - The list of field paths in the mask. See - [Document.fields][google.firestore.v1beta1.Document.fields] - for a field path syntax reference. - """ - - field_paths = proto.RepeatedField(proto.STRING, number=1) - - -class Precondition(proto.Message): - r"""A precondition on a document, used for conditional - operations. - - Attributes: - exists (bool): - When set to ``true``, the target document must exist. When - set to ``false``, the target document must not exist. - update_time (~.timestamp.Timestamp): - When set, the target document must exist and - have been last updated at that time. - """ - - exists = proto.Field(proto.BOOL, number=1, oneof="condition_type") - - update_time = proto.Field( - proto.MESSAGE, number=2, oneof="condition_type", message=timestamp.Timestamp, - ) - - -class TransactionOptions(proto.Message): - r"""Options for creating a new transaction. - - Attributes: - read_only (~.common.TransactionOptions.ReadOnly): - The transaction can only be used for read - operations. - read_write (~.common.TransactionOptions.ReadWrite): - The transaction can be used for both read and - write operations. - """ - - class ReadWrite(proto.Message): - r"""Options for a transaction that can be used to read and write - documents. - - Attributes: - retry_transaction (bytes): - An optional transaction to retry. - """ - - retry_transaction = proto.Field(proto.BYTES, number=1) - - class ReadOnly(proto.Message): - r"""Options for a transaction that can only be used to read - documents. - - Attributes: - read_time (~.timestamp.Timestamp): - Reads documents at the given time. - This may not be older than 60 seconds. - """ - - read_time = proto.Field( - proto.MESSAGE, - number=2, - oneof="consistency_selector", - message=timestamp.Timestamp, - ) - - read_only = proto.Field(proto.MESSAGE, number=2, oneof="mode", message=ReadOnly,) - - read_write = proto.Field(proto.MESSAGE, number=3, oneof="mode", message=ReadWrite,) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1beta1/types/document.py b/google/cloud/firestore_v1beta1/types/document.py deleted file mode 100644 index cfcfc7e14..000000000 --- a/google/cloud/firestore_v1beta1/types/document.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
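``DocumentMask``, ``Precondition``, and ``TransactionOptions`` above are the building blocks for masked and conditional operations. A hedged construction sketch using the parallel ``firestore_v1`` types, which are assumed to mirror the removed ``v1beta1`` definitions:

    from google.cloud.firestore_v1.types import common

    # Restrict a get/list/update response to these fields.
    mask = common.DocumentMask(field_paths=["name", "tags"])

    # The target document must already exist (one branch of condition_type).
    precondition = common.Precondition(exists=True)

    # A read-only transaction; ReadWrite(retry_transaction=...) is the retry form.
    options = common.TransactionOptions(
        read_only=common.TransactionOptions.ReadOnly()
    )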
-# - -import proto # type: ignore - - -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.type import latlng_pb2 as latlng # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1beta1", - manifest={"Document", "Value", "ArrayValue", "MapValue",}, -) - - -class Document(proto.Message): - r"""A Firestore document. - Must not exceed 1 MiB - 4 bytes. - - Attributes: - name (str): - The resource name of the document, for example - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - fields (Sequence[~.document.Document.FieldsEntry]): - The document's fields. - - The map keys represent field names. - - A simple field name contains only characters ``a`` to ``z``, - ``A`` to ``Z``, ``0`` to ``9``, or ``_``, and must not start - with ``0`` to ``9``. For example, ``foo_bar_17``. - - Field names matching the regular expression ``__.*__`` are - reserved. Reserved field names are forbidden except in - certain documented contexts. The map keys, represented as - UTF-8, must not exceed 1,500 bytes and cannot be empty. - - Field paths may be used in other contexts to refer to - structured fields defined here. For ``map_value``, the field - path is represented by the simple or quoted field names of - the containing fields, delimited by ``.``. For example, the - structured field - ``"foo" : { map_value: { "x&y" : { string_value: "hello" }}}`` - would be represented by the field path ``foo.x&y``. - - Within a field path, a quoted field name starts and ends - with :literal:`\`` and may contain any character. Some - characters, including :literal:`\``, must be escaped using a - ``\``. For example, :literal:`\`x&y\`` represents ``x&y`` - and :literal:`\`bak\`tik\`` represents :literal:`bak`tik`. - create_time (~.timestamp.Timestamp): - Output only. The time at which the document was created. - - This value increases monotonically when a document is - deleted then recreated. It can also be compared to values - from other documents and the ``read_time`` of a query. - update_time (~.timestamp.Timestamp): - Output only. The time at which the document was last - changed. - - This value is initially set to the ``create_time`` then - increases monotonically with each change to the document. It - can also be compared to values from other documents and the - ``read_time`` of a query. - """ - - name = proto.Field(proto.STRING, number=1) - - fields = proto.MapField(proto.STRING, proto.MESSAGE, number=2, message="Value",) - - create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - -class Value(proto.Message): - r"""A message that can hold any of the supported value types. - - Attributes: - null_value (~.struct.NullValue): - A null value. - boolean_value (bool): - A boolean value. - integer_value (int): - An integer value. - double_value (float): - A double value. - timestamp_value (~.timestamp.Timestamp): - A timestamp value. - Precise only to microseconds. When stored, any - additional precision is rounded down. - string_value (str): - A string value. - The string, represented as UTF-8, must not - exceed 1 MiB - 89 bytes. Only the first 1,500 - bytes of the UTF-8 representation are considered - by queries. - bytes_value (bytes): - A bytes value. - Must not exceed 1 MiB - 89 bytes. - Only the first 1,500 bytes are considered by - queries. 
- reference_value (str): - A reference to a document. For example: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - geo_point_value (~.latlng.LatLng): - A geo point value representing a point on the - surface of Earth. - array_value (~.document.ArrayValue): - An array value. - Cannot directly contain another array value, - though can contain an map which contains another - array. - map_value (~.document.MapValue): - A map value. - """ - - null_value = proto.Field( - proto.ENUM, number=11, oneof="value_type", enum=struct.NullValue, - ) - - boolean_value = proto.Field(proto.BOOL, number=1, oneof="value_type") - - integer_value = proto.Field(proto.INT64, number=2, oneof="value_type") - - double_value = proto.Field(proto.DOUBLE, number=3, oneof="value_type") - - timestamp_value = proto.Field( - proto.MESSAGE, number=10, oneof="value_type", message=timestamp.Timestamp, - ) - - string_value = proto.Field(proto.STRING, number=17, oneof="value_type") - - bytes_value = proto.Field(proto.BYTES, number=18, oneof="value_type") - - reference_value = proto.Field(proto.STRING, number=5, oneof="value_type") - - geo_point_value = proto.Field( - proto.MESSAGE, number=8, oneof="value_type", message=latlng.LatLng, - ) - - array_value = proto.Field( - proto.MESSAGE, number=9, oneof="value_type", message="ArrayValue", - ) - - map_value = proto.Field( - proto.MESSAGE, number=6, oneof="value_type", message="MapValue", - ) - - -class ArrayValue(proto.Message): - r"""An array value. - - Attributes: - values (Sequence[~.document.Value]): - Values in the array. - """ - - values = proto.RepeatedField(proto.MESSAGE, number=1, message=Value,) - - -class MapValue(proto.Message): - r"""A map value. - - Attributes: - fields (Sequence[~.document.MapValue.FieldsEntry]): - The map's fields. - - The map keys represent field names. Field names matching the - regular expression ``__.*__`` are reserved. Reserved field - names are forbidden except in certain documented contexts. - The map keys, represented as UTF-8, must not exceed 1,500 - bytes and cannot be empty. - """ - - fields = proto.MapField(proto.STRING, proto.MESSAGE, number=1, message=Value,) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1beta1/types/firestore.py b/google/cloud/firestore_v1beta1/types/firestore.py deleted file mode 100644 index 47dc7cbf5..000000000 --- a/google/cloud/firestore_v1beta1/types/firestore.py +++ /dev/null @@ -1,916 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
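``Value`` above is a oneof over the supported Firestore scalar, array, and map types, and ``Document.fields`` maps field names to such values. A hedged construction sketch, again using the parallel ``firestore_v1`` types as a stand-in for the removed ones (the resource name is illustrative):

    from google.cloud.firestore_v1.types import document

    doc = document.Document(
        name="projects/my-project/databases/(default)/documents/users/alice",
        fields={
            "age": document.Value(integer_value=42),
            "nickname": document.Value(string_value="ally"),
            "tags": document.Value(
                array_value=document.ArrayValue(
                    values=[document.Value(string_value="admin")]
                )
            ),
        },
    )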
-# - -import proto # type: ignore - - -from google.cloud.firestore_v1beta1.types import common -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.cloud.firestore_v1beta1.types import query as gf_query -from google.cloud.firestore_v1beta1.types import write -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1beta1", - manifest={ - "GetDocumentRequest", - "ListDocumentsRequest", - "ListDocumentsResponse", - "CreateDocumentRequest", - "UpdateDocumentRequest", - "DeleteDocumentRequest", - "BatchGetDocumentsRequest", - "BatchGetDocumentsResponse", - "BeginTransactionRequest", - "BeginTransactionResponse", - "CommitRequest", - "CommitResponse", - "RollbackRequest", - "RunQueryRequest", - "RunQueryResponse", - "WriteRequest", - "WriteResponse", - "ListenRequest", - "ListenResponse", - "Target", - "TargetChange", - "ListCollectionIdsRequest", - "ListCollectionIdsResponse", - }, -) - - -class GetDocumentRequest(proto.Message): - r"""The request for - [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument]. - - Attributes: - name (str): - Required. The resource name of the Document to get. In the - format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - mask (~.common.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - transaction (bytes): - Reads the document in a transaction. - read_time (~.timestamp.Timestamp): - Reads the version of the document at the - given time. This may not be older than 60 - seconds. - """ - - name = proto.Field(proto.STRING, number=1) - - mask = proto.Field(proto.MESSAGE, number=2, message=common.DocumentMask,) - - transaction = proto.Field(proto.BYTES, number=3, oneof="consistency_selector") - - read_time = proto.Field( - proto.MESSAGE, - number=5, - oneof="consistency_selector", - message=timestamp.Timestamp, - ) - - -class ListDocumentsRequest(proto.Message): - r"""The request for - [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. - - Attributes: - parent (str): - Required. The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - collection_id (str): - Required. The collection ID, relative to ``parent``, to - list. For example: ``chatrooms`` or ``messages``. - page_size (int): - The maximum number of documents to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - order_by (str): - The order to sort results by. For example: - ``priority desc, name``. - mask (~.common.DocumentMask): - The fields to return. If not set, returns all - fields. - If a document has a field that is not present in - this mask, that field will not be returned in - the response. - transaction (bytes): - Reads documents in a transaction. - read_time (~.timestamp.Timestamp): - Reads documents as they were at the given - time. This may not be older than 60 seconds. - show_missing (bool): - If the list should show missing documents. 
A missing - document is a document that does not exist but has - sub-documents. These documents will be returned with a key - but will not have fields, - [Document.create_time][google.firestore.v1beta1.Document.create_time], - or - [Document.update_time][google.firestore.v1beta1.Document.update_time] - set. - - Requests with ``show_missing`` may not specify ``where`` or - ``order_by``. - """ - - parent = proto.Field(proto.STRING, number=1) - - collection_id = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) - - page_token = proto.Field(proto.STRING, number=4) - - order_by = proto.Field(proto.STRING, number=6) - - mask = proto.Field(proto.MESSAGE, number=7, message=common.DocumentMask,) - - transaction = proto.Field(proto.BYTES, number=8, oneof="consistency_selector") - - read_time = proto.Field( - proto.MESSAGE, - number=10, - oneof="consistency_selector", - message=timestamp.Timestamp, - ) - - show_missing = proto.Field(proto.BOOL, number=12) - - -class ListDocumentsResponse(proto.Message): - r"""The response for - [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. - - Attributes: - documents (Sequence[~.gf_document.Document]): - The Documents found. - next_page_token (str): - The next page token. - """ - - @property - def raw_page(self): - return self - - documents = proto.RepeatedField( - proto.MESSAGE, number=1, message=gf_document.Document, - ) - - next_page_token = proto.Field(proto.STRING, number=2) - - -class CreateDocumentRequest(proto.Message): - r"""The request for - [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument]. - - Attributes: - parent (str): - Required. The parent resource. For example: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}`` - collection_id (str): - Required. The collection ID, relative to ``parent``, to - list. For example: ``chatrooms``. - document_id (str): - The client-assigned document ID to use for - this document. - Optional. If not specified, an ID will be - assigned by the service. - document (~.gf_document.Document): - Required. The document to create. ``name`` must not be set. - mask (~.common.DocumentMask): - The fields to return. If not set, returns all - fields. - If the document has a field that is not present - in this mask, that field will not be returned in - the response. - """ - - parent = proto.Field(proto.STRING, number=1) - - collection_id = proto.Field(proto.STRING, number=2) - - document_id = proto.Field(proto.STRING, number=3) - - document = proto.Field(proto.MESSAGE, number=4, message=gf_document.Document,) - - mask = proto.Field(proto.MESSAGE, number=5, message=common.DocumentMask,) - - -class UpdateDocumentRequest(proto.Message): - r"""The request for - [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument]. - - Attributes: - document (~.gf_document.Document): - Required. The updated document. - Creates the document if it does not already - exist. - update_mask (~.common.DocumentMask): - The fields to update. - None of the field paths in the mask may contain - a reserved name. - If the document exists on the server and has - fields not referenced in the mask, they are left - unchanged. - Fields referenced in the mask, but not present - in the input document, are deleted from the - document on the server. - mask (~.common.DocumentMask): - The fields to return. If not set, returns all - fields. 
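``ListDocumentsResponse`` above is paged via ``next_page_token``. A hedged sketch of driving it with the parallel v1 GAPIC client, whose generated ``list_documents`` method wraps the paging in an iterable pager (the client import path, the pager behaviour, and the resource names are assumptions here):

    from google.cloud.firestore_v1.services.firestore import FirestoreClient
    from google.cloud.firestore_v1.types import firestore

    api = FirestoreClient()
    request = firestore.ListDocumentsRequest(
        parent="projects/my-project/databases/(default)/documents",
        collection_id="chatrooms",
        page_size=50,
        order_by="name",
    )
    # The pager follows next_page_token transparently and yields Documents.
    for doc in api.list_documents(request=request):
        print(doc.name)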
- If the document has a field that is not present - in this mask, that field will not be returned in - the response. - current_document (~.common.Precondition): - An optional precondition on the document. - The request will fail if this is set and not met - by the target document. - """ - - document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=common.DocumentMask,) - - mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,) - - current_document = proto.Field( - proto.MESSAGE, number=4, message=common.Precondition, - ) - - -class DeleteDocumentRequest(proto.Message): - r"""The request for - [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument]. - - Attributes: - name (str): - Required. The resource name of the Document to delete. In - the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - current_document (~.common.Precondition): - An optional precondition on the document. - The request will fail if this is set and not met - by the target document. - """ - - name = proto.Field(proto.STRING, number=1) - - current_document = proto.Field( - proto.MESSAGE, number=2, message=common.Precondition, - ) - - -class BatchGetDocumentsRequest(proto.Message): - r"""The request for - [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - documents (Sequence[str]): - The names of the documents to retrieve. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - The request will fail if any of the document is not a child - resource of the given ``database``. Duplicate names will be - elided. - mask (~.common.DocumentMask): - The fields to return. If not set, returns all - fields. - If a document has a field that is not present in - this mask, that field will not be returned in - the response. - transaction (bytes): - Reads documents in a transaction. - new_transaction (~.common.TransactionOptions): - Starts a new transaction and reads the - documents. Defaults to a read-only transaction. - The new transaction ID will be returned as the - first response in the stream. - read_time (~.timestamp.Timestamp): - Reads documents as they were at the given - time. This may not be older than 60 seconds. - """ - - database = proto.Field(proto.STRING, number=1) - - documents = proto.RepeatedField(proto.STRING, number=2) - - mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,) - - transaction = proto.Field(proto.BYTES, number=4, oneof="consistency_selector") - - new_transaction = proto.Field( - proto.MESSAGE, - number=5, - oneof="consistency_selector", - message=common.TransactionOptions, - ) - - read_time = proto.Field( - proto.MESSAGE, - number=7, - oneof="consistency_selector", - message=timestamp.Timestamp, - ) - - -class BatchGetDocumentsResponse(proto.Message): - r"""The streamed response for - [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. - - Attributes: - found (~.gf_document.Document): - A document that was requested. - missing (str): - A document name that was requested but does not exist. In - the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - transaction (bytes): - The transaction that was started as part of this request. 
- Will only be set in the first response, and only if - [BatchGetDocumentsRequest.new_transaction][google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction] - was set in the request. - read_time (~.timestamp.Timestamp): - The time at which the document was read. This may be - monotically increasing, in this case the previous documents - in the result stream are guaranteed not to have changed - between their read_time and this one. - """ - - found = proto.Field( - proto.MESSAGE, number=1, oneof="result", message=gf_document.Document, - ) - - missing = proto.Field(proto.STRING, number=2, oneof="result") - - transaction = proto.Field(proto.BYTES, number=3) - - read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - -class BeginTransactionRequest(proto.Message): - r"""The request for - [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - options (~.common.TransactionOptions): - The options for the transaction. - Defaults to a read-write transaction. - """ - - database = proto.Field(proto.STRING, number=1) - - options = proto.Field(proto.MESSAGE, number=2, message=common.TransactionOptions,) - - -class BeginTransactionResponse(proto.Message): - r"""The response for - [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. - - Attributes: - transaction (bytes): - The transaction that was started. - """ - - transaction = proto.Field(proto.BYTES, number=1) - - -class CommitRequest(proto.Message): - r"""The request for - [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - writes (Sequence[~.write.Write]): - The writes to apply. - Always executed atomically and in order. - transaction (bytes): - If set, applies all writes in this - transaction, and commits it. - """ - - database = proto.Field(proto.STRING, number=1) - - writes = proto.RepeatedField(proto.MESSAGE, number=2, message=write.Write,) - - transaction = proto.Field(proto.BYTES, number=3) - - -class CommitResponse(proto.Message): - r"""The response for - [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. - - Attributes: - write_results (Sequence[~.write.WriteResult]): - The result of applying the writes. - This i-th write result corresponds to the i-th - write in the request. - commit_time (~.timestamp.Timestamp): - The time at which the commit occurred. - """ - - write_results = proto.RepeatedField( - proto.MESSAGE, number=1, message=write.WriteResult, - ) - - commit_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - -class RollbackRequest(proto.Message): - r"""The request for - [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback]. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - transaction (bytes): - Required. The transaction to roll back. - """ - - database = proto.Field(proto.STRING, number=1) - - transaction = proto.Field(proto.BYTES, number=2) - - -class RunQueryRequest(proto.Message): - r"""The request for - [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. - - Attributes: - parent (str): - Required. The parent resource name. 
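``BeginTransactionRequest``/``BeginTransactionResponse`` and ``RollbackRequest`` above pair up into a simple begin-then-rollback (or begin-then-commit) flow. A hedged sketch with the parallel v1 GAPIC client (the import paths and database name are assumptions):

    from google.cloud.firestore_v1.services.firestore import FirestoreClient
    from google.cloud.firestore_v1.types import firestore

    api = FirestoreClient()
    database = "projects/my-project/databases/(default)"

    # Defaults to a read-write transaction when no options are supplied.
    txn = api.begin_transaction(
        request=firestore.BeginTransactionRequest(database=database)
    )

    # Abandon it again; commit(..., writes=[...]) would be the other exit path.
    api.rollback(
        request=firestore.RollbackRequest(
            database=database, transaction=txn.transaction
        )
    )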
In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_query (~.gf_query.StructuredQuery): - A structured query. - transaction (bytes): - Reads documents in a transaction. - new_transaction (~.common.TransactionOptions): - Starts a new transaction and reads the - documents. Defaults to a read-only transaction. - The new transaction ID will be returned as the - first response in the stream. - read_time (~.timestamp.Timestamp): - Reads documents as they were at the given - time. This may not be older than 60 seconds. - """ - - parent = proto.Field(proto.STRING, number=1) - - structured_query = proto.Field( - proto.MESSAGE, number=2, oneof="query_type", message=gf_query.StructuredQuery, - ) - - transaction = proto.Field(proto.BYTES, number=5, oneof="consistency_selector") - - new_transaction = proto.Field( - proto.MESSAGE, - number=6, - oneof="consistency_selector", - message=common.TransactionOptions, - ) - - read_time = proto.Field( - proto.MESSAGE, - number=7, - oneof="consistency_selector", - message=timestamp.Timestamp, - ) - - -class RunQueryResponse(proto.Message): - r"""The response for - [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. - - Attributes: - transaction (bytes): - The transaction that was started as part of this request. - Can only be set in the first response, and only if - [RunQueryRequest.new_transaction][google.firestore.v1beta1.RunQueryRequest.new_transaction] - was set in the request. If set, no other fields will be set - in this response. - document (~.gf_document.Document): - A query result. - Not set when reporting partial progress. - read_time (~.timestamp.Timestamp): - The time at which the document was read. This may be - monotonically increasing; in this case, the previous - documents in the result stream are guaranteed not to have - changed between their ``read_time`` and this one. - - If the query returns no results, a response with - ``read_time`` and no ``document`` will be sent, and this - represents the time at which the query was run. - skipped_results (int): - The number of results that have been skipped - due to an offset between the last response and - the current response. - """ - - transaction = proto.Field(proto.BYTES, number=2) - - document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,) - - read_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - skipped_results = proto.Field(proto.INT32, number=4) - - -class WriteRequest(proto.Message): - r"""The request for - [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. - - The first request creates a stream, or resumes an existing one from - a token. - - When creating a new stream, the server replies with a response - containing only an ID and a token, to use in the next request. - - When resuming a stream, the server first streams any responses later - than the given token, then a response containing only an up-to-date - token, to use in the next request. - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. This is - only required in the first message. - stream_id (str): - The ID of the write stream to resume. - This may only be set in the first message. 
When - left empty, a new write stream will be created. - writes (Sequence[~.write.Write]): - The writes to apply. - Always executed atomically and in order. - This must be empty on the first request. - This may be empty on the last request. - This must not be empty on all other requests. - stream_token (bytes): - A stream token that was previously sent by the server. - - The client should set this field to the token from the most - recent - [WriteResponse][google.firestore.v1beta1.WriteResponse] it - has received. This acknowledges that the client has received - responses up to this token. After sending this token, - earlier tokens may not be used anymore. - - The server may close the stream if there are too many - unacknowledged responses. - - Leave this field unset when creating a new stream. To resume - a stream at a specific point, set this field and the - ``stream_id`` field. - - Leave this field unset when creating a new stream. - labels (Sequence[~.firestore.WriteRequest.LabelsEntry]): - Labels associated with this write request. - """ - - database = proto.Field(proto.STRING, number=1) - - stream_id = proto.Field(proto.STRING, number=2) - - writes = proto.RepeatedField(proto.MESSAGE, number=3, message=write.Write,) - - stream_token = proto.Field(proto.BYTES, number=4) - - labels = proto.MapField(proto.STRING, proto.STRING, number=5) - - -class WriteResponse(proto.Message): - r"""The response for - [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. - - Attributes: - stream_id (str): - The ID of the stream. - Only set on the first message, when a new stream - was created. - stream_token (bytes): - A token that represents the position of this - response in the stream. This can be used by a - client to resume the stream at this point. - This field is always set. - write_results (Sequence[~.write.WriteResult]): - The result of applying the writes. - This i-th write result corresponds to the i-th - write in the request. - commit_time (~.timestamp.Timestamp): - The time at which the commit occurred. - """ - - stream_id = proto.Field(proto.STRING, number=1) - - stream_token = proto.Field(proto.BYTES, number=2) - - write_results = proto.RepeatedField( - proto.MESSAGE, number=3, message=write.WriteResult, - ) - - commit_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - -class ListenRequest(proto.Message): - r"""A request for - [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen] - - Attributes: - database (str): - Required. The database name. In the format: - ``projects/{project_id}/databases/{database_id}``. - add_target (~.firestore.Target): - A target to add to this stream. - remove_target (int): - The ID of a target to remove from this - stream. - labels (Sequence[~.firestore.ListenRequest.LabelsEntry]): - Labels associated with this target change. - """ - - database = proto.Field(proto.STRING, number=1) - - add_target = proto.Field( - proto.MESSAGE, number=2, oneof="target_change", message="Target", - ) - - remove_target = proto.Field(proto.INT32, number=3, oneof="target_change") - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) - - -class ListenResponse(proto.Message): - r"""The response for - [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]. - - Attributes: - target_change (~.firestore.TargetChange): - Targets have changed. - document_change (~.write.DocumentChange): - A [Document][google.firestore.v1beta1.Document] has changed. 
- document_delete (~.write.DocumentDelete): - A [Document][google.firestore.v1beta1.Document] has been - deleted. - document_remove (~.write.DocumentRemove): - A [Document][google.firestore.v1beta1.Document] has been - removed from a target (because it is no longer relevant to - that target). - filter (~.write.ExistenceFilter): - A filter to apply to the set of documents - previously returned for the given target. - - Returned when documents may have been removed - from the given target, but the exact documents - are unknown. - """ - - target_change = proto.Field( - proto.MESSAGE, number=2, oneof="response_type", message="TargetChange", - ) - - document_change = proto.Field( - proto.MESSAGE, number=3, oneof="response_type", message=write.DocumentChange, - ) - - document_delete = proto.Field( - proto.MESSAGE, number=4, oneof="response_type", message=write.DocumentDelete, - ) - - document_remove = proto.Field( - proto.MESSAGE, number=6, oneof="response_type", message=write.DocumentRemove, - ) - - filter = proto.Field( - proto.MESSAGE, number=5, oneof="response_type", message=write.ExistenceFilter, - ) - - -class Target(proto.Message): - r"""A specification of a set of documents to listen to. - - Attributes: - query (~.firestore.Target.QueryTarget): - A target specified by a query. - documents (~.firestore.Target.DocumentsTarget): - A target specified by a set of document - names. - resume_token (bytes): - A resume token from a prior - [TargetChange][google.firestore.v1beta1.TargetChange] for an - identical target. - - Using a resume token with a different target is unsupported - and may fail. - read_time (~.timestamp.Timestamp): - Start listening after a specific ``read_time``. - - The client must know the state of matching documents at this - time. - target_id (int): - The target ID that identifies the target on - the stream. Must be a positive number and non- - zero. - once (bool): - If the target should be removed once it is - current and consistent. - """ - - class DocumentsTarget(proto.Message): - r"""A target specified by a set of documents names. - - Attributes: - documents (Sequence[str]): - The names of the documents to retrieve. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - The request will fail if any of the document is not a child - resource of the given ``database``. Duplicate names will be - elided. - """ - - documents = proto.RepeatedField(proto.STRING, number=2) - - class QueryTarget(proto.Message): - r"""A target specified by a query. - - Attributes: - parent (str): - The parent resource name. In the format: - ``projects/{project_id}/databases/{database_id}/documents`` - or - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents`` or - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - structured_query (~.gf_query.StructuredQuery): - A structured query. 
- """ - - parent = proto.Field(proto.STRING, number=1) - - structured_query = proto.Field( - proto.MESSAGE, - number=2, - oneof="query_type", - message=gf_query.StructuredQuery, - ) - - query = proto.Field( - proto.MESSAGE, number=2, oneof="target_type", message=QueryTarget, - ) - - documents = proto.Field( - proto.MESSAGE, number=3, oneof="target_type", message=DocumentsTarget, - ) - - resume_token = proto.Field(proto.BYTES, number=4, oneof="resume_type") - - read_time = proto.Field( - proto.MESSAGE, number=11, oneof="resume_type", message=timestamp.Timestamp, - ) - - target_id = proto.Field(proto.INT32, number=5) - - once = proto.Field(proto.BOOL, number=6) - - -class TargetChange(proto.Message): - r"""Targets being watched have changed. - - Attributes: - target_change_type (~.firestore.TargetChange.TargetChangeType): - The type of change that occurred. - target_ids (Sequence[int]): - The target IDs of targets that have changed. - If empty, the change applies to all targets. - - The order of the target IDs is not defined. - cause (~.status.Status): - The error that resulted in this change, if - applicable. - resume_token (bytes): - A token that can be used to resume the stream for the given - ``target_ids``, or all targets if ``target_ids`` is empty. - - Not set on every target change. - read_time (~.timestamp.Timestamp): - The consistent ``read_time`` for the given ``target_ids`` - (omitted when the target_ids are not at a consistent - snapshot). - - The stream is guaranteed to send a ``read_time`` with - ``target_ids`` empty whenever the entire stream reaches a - new consistent snapshot. ADD, CURRENT, and RESET messages - are guaranteed to (eventually) result in a new consistent - snapshot (while NO_CHANGE and REMOVE messages are not). - - For a given stream, ``read_time`` is guaranteed to be - monotonically increasing. - """ - - class TargetChangeType(proto.Enum): - r"""The type of change.""" - NO_CHANGE = 0 - ADD = 1 - REMOVE = 2 - CURRENT = 3 - RESET = 4 - - target_change_type = proto.Field(proto.ENUM, number=1, enum=TargetChangeType,) - - target_ids = proto.RepeatedField(proto.INT32, number=2) - - cause = proto.Field(proto.MESSAGE, number=3, message=status.Status,) - - resume_token = proto.Field(proto.BYTES, number=4) - - read_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) - - -class ListCollectionIdsRequest(proto.Message): - r"""The request for - [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. - - Attributes: - parent (str): - Required. The parent document. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - For example: - ``projects/my-project/databases/my-database/documents/chatrooms/my-chatroom`` - page_size (int): - The maximum number of results to return. - page_token (str): - A page token. Must be a value from - [ListCollectionIdsResponse][google.firestore.v1beta1.ListCollectionIdsResponse]. - """ - - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - -class ListCollectionIdsResponse(proto.Message): - r"""The response from - [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. - - Attributes: - collection_ids (Sequence[str]): - The collection ids. - next_page_token (str): - A page token that may be used to continue the - list. 
- """ - - @property - def raw_page(self): - return self - - collection_ids = proto.RepeatedField(proto.STRING, number=1) - - next_page_token = proto.Field(proto.STRING, number=2) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1beta1/types/query.py b/google/cloud/firestore_v1beta1/types/query.py deleted file mode 100644 index d93c47a5e..000000000 --- a/google/cloud/firestore_v1beta1/types/query.py +++ /dev/null @@ -1,298 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import proto # type: ignore - - -from google.cloud.firestore_v1beta1.types import document -from google.protobuf import wrappers_pb2 as wrappers # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1beta1", manifest={"StructuredQuery", "Cursor",}, -) - - -class StructuredQuery(proto.Message): - r"""A Firestore query. - - Attributes: - select (~.query.StructuredQuery.Projection): - The projection to return. - from_ (Sequence[~.query.StructuredQuery.CollectionSelector]): - The collections to query. - where (~.query.StructuredQuery.Filter): - The filter to apply. - order_by (Sequence[~.query.StructuredQuery.Order]): - The order to apply to the query results. - - Firestore guarantees a stable ordering through the following - rules: - - - Any field required to appear in ``order_by``, that is not - already specified in ``order_by``, is appended to the - order in field name order by default. - - If an order on ``__name__`` is not specified, it is - appended by default. - - Fields are appended with the same sort direction as the last - order specified, or 'ASCENDING' if no order was specified. - For example: - - - ``SELECT * FROM Foo ORDER BY A`` becomes - ``SELECT * FROM Foo ORDER BY A, __name__`` - - ``SELECT * FROM Foo ORDER BY A DESC`` becomes - ``SELECT * FROM Foo ORDER BY A DESC, __name__ DESC`` - - ``SELECT * FROM Foo WHERE A > 1`` becomes - ``SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__`` - start_at (~.query.Cursor): - A starting point for the query results. - end_at (~.query.Cursor): - A end point for the query results. - offset (int): - The number of results to skip. - Applies before limit, but after all other - constraints. Must be >= 0 if specified. - limit (~.wrappers.Int32Value): - The maximum number of results to return. - Applies after all other constraints. - Must be >= 0 if specified. - """ - - class Direction(proto.Enum): - r"""A sort direction.""" - DIRECTION_UNSPECIFIED = 0 - ASCENDING = 1 - DESCENDING = 2 - - class CollectionSelector(proto.Message): - r"""A selection of a collection, such as ``messages as m1``. - - Attributes: - collection_id (str): - The collection ID. - When set, selects only collections with this ID. - all_descendants (bool): - When false, selects only collections that are immediate - children of the ``parent`` specified in the containing - ``RunQueryRequest``. When true, selects all descendant - collections. 
- """ - - collection_id = proto.Field(proto.STRING, number=2) - - all_descendants = proto.Field(proto.BOOL, number=3) - - class Filter(proto.Message): - r"""A filter. - - Attributes: - composite_filter (~.query.StructuredQuery.CompositeFilter): - A composite filter. - field_filter (~.query.StructuredQuery.FieldFilter): - A filter on a document field. - unary_filter (~.query.StructuredQuery.UnaryFilter): - A filter that takes exactly one argument. - """ - - composite_filter = proto.Field( - proto.MESSAGE, - number=1, - oneof="filter_type", - message="StructuredQuery.CompositeFilter", - ) - - field_filter = proto.Field( - proto.MESSAGE, - number=2, - oneof="filter_type", - message="StructuredQuery.FieldFilter", - ) - - unary_filter = proto.Field( - proto.MESSAGE, - number=3, - oneof="filter_type", - message="StructuredQuery.UnaryFilter", - ) - - class CompositeFilter(proto.Message): - r"""A filter that merges multiple other filters using the given - operator. - - Attributes: - op (~.query.StructuredQuery.CompositeFilter.Operator): - The operator for combining multiple filters. - filters (Sequence[~.query.StructuredQuery.Filter]): - The list of filters to combine. - Must contain at least one filter. - """ - - class Operator(proto.Enum): - r"""A composite filter operator.""" - OPERATOR_UNSPECIFIED = 0 - AND = 1 - - op = proto.Field( - proto.ENUM, number=1, enum="StructuredQuery.CompositeFilter.Operator", - ) - - filters = proto.RepeatedField( - proto.MESSAGE, number=2, message="StructuredQuery.Filter", - ) - - class FieldFilter(proto.Message): - r"""A filter on a specific field. - - Attributes: - field (~.query.StructuredQuery.FieldReference): - The field to filter by. - op (~.query.StructuredQuery.FieldFilter.Operator): - The operator to filter by. - value (~.document.Value): - The value to compare to. - """ - - class Operator(proto.Enum): - r"""A field filter operator.""" - OPERATOR_UNSPECIFIED = 0 - LESS_THAN = 1 - LESS_THAN_OR_EQUAL = 2 - GREATER_THAN = 3 - GREATER_THAN_OR_EQUAL = 4 - EQUAL = 5 - ARRAY_CONTAINS = 7 - IN = 8 - ARRAY_CONTAINS_ANY = 9 - - field = proto.Field( - proto.MESSAGE, number=1, message="StructuredQuery.FieldReference", - ) - - op = proto.Field( - proto.ENUM, number=2, enum="StructuredQuery.FieldFilter.Operator", - ) - - value = proto.Field(proto.MESSAGE, number=3, message=document.Value,) - - class UnaryFilter(proto.Message): - r"""A filter with a single operand. - - Attributes: - op (~.query.StructuredQuery.UnaryFilter.Operator): - The unary operator to apply. - field (~.query.StructuredQuery.FieldReference): - The field to which to apply the operator. - """ - - class Operator(proto.Enum): - r"""A unary operator.""" - OPERATOR_UNSPECIFIED = 0 - IS_NAN = 2 - IS_NULL = 3 - - op = proto.Field( - proto.ENUM, number=1, enum="StructuredQuery.UnaryFilter.Operator", - ) - - field = proto.Field( - proto.MESSAGE, - number=2, - oneof="operand_type", - message="StructuredQuery.FieldReference", - ) - - class Order(proto.Message): - r"""An order on a field. - - Attributes: - field (~.query.StructuredQuery.FieldReference): - The field to order by. - direction (~.query.StructuredQuery.Direction): - The direction to order by. Defaults to ``ASCENDING``. - """ - - field = proto.Field( - proto.MESSAGE, number=1, message="StructuredQuery.FieldReference", - ) - - direction = proto.Field(proto.ENUM, number=2, enum="StructuredQuery.Direction",) - - class FieldReference(proto.Message): - r"""A reference to a field, such as ``max(messages.time) as max_time``. 
- - Attributes: - field_path (str): - - """ - - field_path = proto.Field(proto.STRING, number=2) - - class Projection(proto.Message): - r"""The projection of document's fields to return. - - Attributes: - fields (Sequence[~.query.StructuredQuery.FieldReference]): - The fields to return. - - If empty, all fields are returned. To only return the name - of the document, use ``['__name__']``. - """ - - fields = proto.RepeatedField( - proto.MESSAGE, number=2, message="StructuredQuery.FieldReference", - ) - - select = proto.Field(proto.MESSAGE, number=1, message=Projection,) - - from_ = proto.RepeatedField(proto.MESSAGE, number=2, message=CollectionSelector,) - - where = proto.Field(proto.MESSAGE, number=3, message=Filter,) - - order_by = proto.RepeatedField(proto.MESSAGE, number=4, message=Order,) - - start_at = proto.Field(proto.MESSAGE, number=7, message="Cursor",) - - end_at = proto.Field(proto.MESSAGE, number=8, message="Cursor",) - - offset = proto.Field(proto.INT32, number=6) - - limit = proto.Field(proto.MESSAGE, number=5, message=wrappers.Int32Value,) - - -class Cursor(proto.Message): - r"""A position in a query result set. - - Attributes: - values (Sequence[~.document.Value]): - The values that represent a position, in the - order they appear in the order by clause of a - query. - Can contain fewer values than specified in the - order by clause. - before (bool): - If the position is just before or just after - the given values, relative to the sort order - defined by the query. - """ - - values = proto.RepeatedField(proto.MESSAGE, number=1, message=document.Value,) - - before = proto.Field(proto.BOOL, number=2) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1beta1/types/write.py b/google/cloud/firestore_v1beta1/types/write.py deleted file mode 100644 index 9314010b4..000000000 --- a/google/cloud/firestore_v1beta1/types/write.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import proto # type: ignore - - -from google.cloud.firestore_v1beta1.types import common -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - - -__protobuf__ = proto.module( - package="google.firestore.v1beta1", - manifest={ - "Write", - "DocumentTransform", - "WriteResult", - "DocumentChange", - "DocumentDelete", - "DocumentRemove", - "ExistenceFilter", - }, -) - - -class Write(proto.Message): - r"""A write on a document. - - Attributes: - update (~.gf_document.Document): - A document to write. - delete (str): - A document name to delete. In the format: - ``projects/{project_id}/databases/{database_id}/documents/{document_path}``. - transform (~.write.DocumentTransform): - Applies a transformation to a document. At most one - ``transform`` per document is allowed in a given request. An - ``update`` cannot follow a ``transform`` on the same - document in a given request. 
- update_mask (~.common.DocumentMask): - The fields to update in this write. - - This field can be set only when the operation is ``update``. - If the mask is not set for an ``update`` and the document - exists, any existing data will be overwritten. If the mask - is set and the document on the server has fields not covered - by the mask, they are left unchanged. Fields referenced in - the mask, but not present in the input document, are deleted - from the document on the server. The field paths in this - mask must not contain a reserved field name. - current_document (~.common.Precondition): - An optional precondition on the document. - The write will fail if this is set and not met - by the target document. - """ - - update = proto.Field( - proto.MESSAGE, number=1, oneof="operation", message=gf_document.Document, - ) - - delete = proto.Field(proto.STRING, number=2, oneof="operation") - - transform = proto.Field( - proto.MESSAGE, number=6, oneof="operation", message="DocumentTransform", - ) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=common.DocumentMask,) - - current_document = proto.Field( - proto.MESSAGE, number=4, message=common.Precondition, - ) - - -class DocumentTransform(proto.Message): - r"""A transformation of a document. - - Attributes: - document (str): - The name of the document to transform. - field_transforms (Sequence[~.write.DocumentTransform.FieldTransform]): - The list of transformations to apply to the - fields of the document, in order. - This must not be empty. - """ - - class FieldTransform(proto.Message): - r"""A transformation of a field of the document. - - Attributes: - field_path (str): - The path of the field. See - [Document.fields][google.firestore.v1beta1.Document.fields] - for the field path syntax reference. - set_to_server_value (~.write.DocumentTransform.FieldTransform.ServerValue): - Sets the field to the given server value. - increment (~.gf_document.Value): - Adds the given value to the field's current - value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the given value. If either - of the given value or the current field value - are doubles, both values will be interpreted as - doubles. Double arithmetic and representation of - double values follow IEEE 754 semantics. If - there is positive/negative integer overflow, the - field is resolved to the largest magnitude - positive/negative integer. - maximum (~.gf_document.Value): - Sets the field to the maximum of its current - value and the given value. - This must be an integer or a double value. - If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the given value. If a - maximum operation is applied where the field and - the input value are of mixed types (that is - - one is an integer and one is a double) the field - takes on the type of the larger operand. If the - operands are equivalent (e.g. 3 and 3.0), the - field does not change. 0, 0.0, and -0.0 are all - zero. The maximum of a zero stored value and - zero input value is always the stored value. - The maximum of any numeric value x and NaN is - NaN. - minimum (~.gf_document.Value): - Sets the field to the minimum of its current - value and the given value. - This must be an integer or a double value. 
- If the field is not an integer or double, or if - the field does not yet exist, the transformation - will set the field to the input value. If a - minimum operation is applied where the field and - the input value are of mixed types (that is - - one is an integer and one is a double) the field - takes on the type of the smaller operand. If the - operands are equivalent (e.g. 3 and 3.0), the - field does not change. 0, 0.0, and -0.0 are all - zero. The minimum of a zero stored value and - zero input value is always the stored value. - The minimum of any numeric value x and NaN is - NaN. - append_missing_elements (~.gf_document.ArrayValue): - Append the given elements in order if they are not already - present in the current field value. If the field is not an - array, or if the field does not yet exist, it is first set - to the empty array. - - Equivalent numbers of different types (e.g. 3L and 3.0) are - considered equal when checking if a value is missing. NaN is - equal to NaN, and Null is equal to Null. If the input - contains multiple equivalent values, only the first will be - considered. - - The corresponding transform_result will be the null value. - remove_all_from_array (~.gf_document.ArrayValue): - Remove all of the given elements from the array in the - field. If the field is not an array, or if the field does - not yet exist, it is set to the empty array. - - Equivalent numbers of the different types (e.g. 3L and 3.0) - are considered equal when deciding whether an element should - be removed. NaN is equal to NaN, and Null is equal to Null. - This will remove all equivalent values if there are - duplicates. - - The corresponding transform_result will be the null value. - """ - - class ServerValue(proto.Enum): - r"""A value that is calculated by the server.""" - SERVER_VALUE_UNSPECIFIED = 0 - REQUEST_TIME = 1 - - field_path = proto.Field(proto.STRING, number=1) - - set_to_server_value = proto.Field( - proto.ENUM, - number=2, - oneof="transform_type", - enum="DocumentTransform.FieldTransform.ServerValue", - ) - - increment = proto.Field( - proto.MESSAGE, number=3, oneof="transform_type", message=gf_document.Value, - ) - - maximum = proto.Field( - proto.MESSAGE, number=4, oneof="transform_type", message=gf_document.Value, - ) - - minimum = proto.Field( - proto.MESSAGE, number=5, oneof="transform_type", message=gf_document.Value, - ) - - append_missing_elements = proto.Field( - proto.MESSAGE, - number=6, - oneof="transform_type", - message=gf_document.ArrayValue, - ) - - remove_all_from_array = proto.Field( - proto.MESSAGE, - number=7, - oneof="transform_type", - message=gf_document.ArrayValue, - ) - - document = proto.Field(proto.STRING, number=1) - - field_transforms = proto.RepeatedField( - proto.MESSAGE, number=2, message=FieldTransform, - ) - - -class WriteResult(proto.Message): - r"""The result of applying a write. - - Attributes: - update_time (~.timestamp.Timestamp): - The last update time of the document after applying the - write. Not set after a ``delete``. - - If the write did not actually change the document, this will - be the previous update_time. - transform_results (Sequence[~.gf_document.Value]): - The results of applying each - [DocumentTransform.FieldTransform][google.firestore.v1beta1.DocumentTransform.FieldTransform], - in the same order. 
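To make the transform semantics above concrete, a small sketch (the document path is a placeholder) of a transform-only ``Write`` combining a ``REQUEST_TIME`` server value with an increment; the server reports the resulting values in ``WriteResult.transform_results``, in the same order as the transforms.

    from google.cloud.firestore_v1beta1.types import document, write

    doc = "projects/my-project/databases/my-database/documents/chatrooms/my-chatroom"

    transform_write = write.Write(
        transform=write.DocumentTransform(
            document=doc,
            field_transforms=[
                # Stamp the commit time into the document.
                write.DocumentTransform.FieldTransform(
                    field_path="updated_at",
                    set_to_server_value=(
                        write.DocumentTransform.FieldTransform.ServerValue.REQUEST_TIME
                    ),
                ),
                # Add 1 to the current value (or set it, if absent or non-numeric).
                write.DocumentTransform.FieldTransform(
                    field_path="views",
                    increment=document.Value(integer_value=1),
                ),
            ],
        )
    )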
- """ - - update_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - transform_results = proto.RepeatedField( - proto.MESSAGE, number=2, message=gf_document.Value, - ) - - -class DocumentChange(proto.Message): - r"""A [Document][google.firestore.v1beta1.Document] has changed. - - May be the result of multiple - [writes][google.firestore.v1beta1.Write], including deletes, that - ultimately resulted in a new value for the - [Document][google.firestore.v1beta1.Document]. - - Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange] - messages may be returned for the same logical change, if multiple - targets are affected. - - Attributes: - document (~.gf_document.Document): - The new state of the - [Document][google.firestore.v1beta1.Document]. - - If ``mask`` is set, contains only fields that were updated - or added. - target_ids (Sequence[int]): - A set of target IDs of targets that match - this document. - removed_target_ids (Sequence[int]): - A set of target IDs for targets that no - longer match this document. - """ - - document = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,) - - target_ids = proto.RepeatedField(proto.INT32, number=5) - - removed_target_ids = proto.RepeatedField(proto.INT32, number=6) - - -class DocumentDelete(proto.Message): - r"""A [Document][google.firestore.v1beta1.Document] has been deleted. - - May be the result of multiple - [writes][google.firestore.v1beta1.Write], including updates, the - last of which deleted the - [Document][google.firestore.v1beta1.Document]. - - Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete] - messages may be returned for the same logical delete, if multiple - targets are affected. - - Attributes: - document (str): - The resource name of the - [Document][google.firestore.v1beta1.Document] that was - deleted. - removed_target_ids (Sequence[int]): - A set of target IDs for targets that - previously matched this entity. - read_time (~.timestamp.Timestamp): - The read timestamp at which the delete was observed. - - Greater or equal to the ``commit_time`` of the delete. - """ - - document = proto.Field(proto.STRING, number=1) - - removed_target_ids = proto.RepeatedField(proto.INT32, number=6) - - read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - -class DocumentRemove(proto.Message): - r"""A [Document][google.firestore.v1beta1.Document] has been removed - from the view of the targets. - - Sent if the document is no longer relevant to a target and is out of - view. Can be sent instead of a DocumentDelete or a DocumentChange if - the server can not send the new value of the document. - - Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove] - messages may be returned for the same logical write or delete, if - multiple targets are affected. - - Attributes: - document (str): - The resource name of the - [Document][google.firestore.v1beta1.Document] that has gone - out of view. - removed_target_ids (Sequence[int]): - A set of target IDs for targets that - previously matched this document. - read_time (~.timestamp.Timestamp): - The read timestamp at which the remove was observed. - - Greater or equal to the ``commit_time`` of the - change/delete/remove. 
- """ - - document = proto.Field(proto.STRING, number=1) - - removed_target_ids = proto.RepeatedField(proto.INT32, number=2) - - read_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - -class ExistenceFilter(proto.Message): - r"""A digest of all the documents that match a given target. - - Attributes: - target_id (int): - The target ID to which this filter applies. - count (int): - The total count of documents that match - [target_id][google.firestore.v1beta1.ExistenceFilter.target_id]. - - If different from the count of documents in the client that - match, the client must manually determine which documents no - longer match the target. - """ - - target_id = proto.Field(proto.INT32, number=1) - - count = proto.Field(proto.INT32, number=2) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/firestore_v1beta1/watch.py b/google/cloud/firestore_v1beta1/watch.py deleted file mode 100644 index fe639cc4d..000000000 --- a/google/cloud/firestore_v1beta1/watch.py +++ /dev/null @@ -1,723 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import collections -import threading -import datetime -from enum import Enum -import functools - -import pytz - -from google.api_core.bidi import ResumableBidiRpc -from google.api_core.bidi import BackgroundConsumer -from google.cloud.firestore_v1beta1.types import firestore -from google.cloud.firestore_v1beta1 import _helpers - -from google.api_core import exceptions - -import grpc - -"""Python client for Google Cloud Firestore Watch.""" - -_LOGGER = logging.getLogger(__name__) - -WATCH_TARGET_ID = 0x5079 # "Py" - -GRPC_STATUS_CODE = { - "OK": 0, - "CANCELLED": 1, - "UNKNOWN": 2, - "INVALID_ARGUMENT": 3, - "DEADLINE_EXCEEDED": 4, - "NOT_FOUND": 5, - "ALREADY_EXISTS": 6, - "PERMISSION_DENIED": 7, - "UNAUTHENTICATED": 16, - "RESOURCE_EXHAUSTED": 8, - "FAILED_PRECONDITION": 9, - "ABORTED": 10, - "OUT_OF_RANGE": 11, - "UNIMPLEMENTED": 12, - "INTERNAL": 13, - "UNAVAILABLE": 14, - "DATA_LOSS": 15, - "DO_NOT_USE": -1, -} -_RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" -_RETRYABLE_STREAM_ERRORS = ( - exceptions.DeadlineExceeded, - exceptions.ServiceUnavailable, - exceptions.InternalServerError, - exceptions.Unknown, - exceptions.GatewayTimeout, -) - -DocTreeEntry = collections.namedtuple("DocTreeEntry", ["value", "index"]) - - -class WatchDocTree(object): - # TODO: Currently this uses a dict. Other implementations us an rbtree. - # The performance of this implementation should be investigated and may - # require modifying the underlying datastructure to a rbtree. 
- def __init__(self): - self._dict = {} - self._index = 0 - - def keys(self): - return list(self._dict.keys()) - - def _copy(self): - wdt = WatchDocTree() - wdt._dict = self._dict.copy() - wdt._index = self._index - self = wdt - return self - - def insert(self, key, value): - self = self._copy() - self._dict[key] = DocTreeEntry(value, self._index) - self._index += 1 - return self - - def find(self, key): - return self._dict[key] - - def remove(self, key): - self = self._copy() - del self._dict[key] - return self - - def __iter__(self): - for k in self._dict: - yield k - - def __len__(self): - return len(self._dict) - - def __contains__(self, k): - return k in self._dict - - -class ChangeType(Enum): - ADDED = 1 - REMOVED = 2 - MODIFIED = 3 - - -class DocumentChange(object): - def __init__(self, type, document, old_index, new_index): - """DocumentChange - - Args: - type (ChangeType): - document (document.DocumentSnapshot): - old_index (int): - new_index (int): - """ - # TODO: spec indicated an isEqual param also - self.type = type - self.document = document - self.old_index = old_index - self.new_index = new_index - - -class WatchResult(object): - def __init__(self, snapshot, name, change_type): - self.snapshot = snapshot - self.name = name - self.change_type = change_type - - -def _maybe_wrap_exception(exception): - """Wraps a gRPC exception class, if needed.""" - if isinstance(exception, grpc.RpcError): - return exceptions.from_grpc_error(exception) - return exception - - -def document_watch_comparator(doc1, doc2): - assert doc1 == doc2, "Document watches only support one document." - return 0 - - -class Watch(object): - - BackgroundConsumer = BackgroundConsumer # FBO unit tests - ResumableBidiRpc = ResumableBidiRpc # FBO unit tests - - def __init__( - self, - document_reference, - firestore, - target, - comparator, - snapshot_callback, - document_snapshot_cls, - document_reference_cls, - BackgroundConsumer=None, # FBO unit testing - ResumableBidiRpc=None, # FBO unit testing - ): - """ - Args: - firestore: - target: - comparator: - snapshot_callback: Callback method to process snapshots. - Args: - docs (List(DocumentSnapshot)): A callback that returns the - ordered list of documents stored in this snapshot. - changes (List(str)): A callback that returns the list of - changed documents since the last snapshot delivered for - this watch. - read_time (string): The ISO 8601 time at which this - snapshot was obtained. 
- - document_snapshot_cls: instance of DocumentSnapshot - document_reference_cls: instance of DocumentReference - """ - self._document_reference = document_reference - self._firestore = firestore - self._api = firestore._firestore_api - self._targets = target - self._comparator = comparator - self.DocumentSnapshot = document_snapshot_cls - self.DocumentReference = document_reference_cls - self._snapshot_callback = snapshot_callback - self._closing = threading.Lock() - self._closed = False - - def should_recover(exc): # pragma: NO COVER - return ( - isinstance(exc, grpc.RpcError) - and exc.code() == grpc.StatusCode.UNAVAILABLE - ) - - initial_request = firestore.ListenRequest( - database=self._firestore._database_string, add_target=self._targets - ) - - if ResumableBidiRpc is None: - ResumableBidiRpc = self.ResumableBidiRpc # FBO unit tests - - self._rpc = ResumableBidiRpc( - self._api._transport.listen, - initial_request=initial_request, - should_recover=should_recover, - metadata=self._firestore._rpc_metadata, - ) - - self._rpc.add_done_callback(self._on_rpc_done) - - # Initialize state for on_snapshot - # The sorted tree of QueryDocumentSnapshots as sent in the last - # snapshot. We only look at the keys. - self.doc_tree = WatchDocTree() - - # A map of document names to QueryDocumentSnapshots for the last sent - # snapshot. - self.doc_map = {} - - # The accumulates map of document changes (keyed by document name) for - # the current snapshot. - self.change_map = {} - - # The current state of the query results. - self.current = False - - # We need this to track whether we've pushed an initial set of changes, - # since we should push those even when there are no changes, if there - # aren't docs. - self.has_pushed = False - - # The server assigns and updates the resume token. - self.resume_token = None - if BackgroundConsumer is None: # FBO unit tests - BackgroundConsumer = self.BackgroundConsumer - - self._consumer = BackgroundConsumer(self._rpc, self.on_snapshot) - self._consumer.start() - - @property - def is_active(self): - """bool: True if this manager is actively streaming. - - Note that ``False`` does not indicate this is complete shut down, - just that it stopped getting new messages. - """ - return self._consumer is not None and self._consumer.is_active - - def close(self, reason=None): - """Stop consuming messages and shutdown all helper threads. - - This method is idempotent. Additional calls will have no effect. - - Args: - reason (Any): The reason to close this. If None, this is considered - an "intentional" shutdown. - """ - with self._closing: - if self._closed: - return - - # Stop consuming messages. - if self.is_active: - _LOGGER.debug("Stopping consumer.") - self._consumer.stop() - self._consumer = None - - self._rpc.close() - self._rpc = None - self._closed = True - _LOGGER.debug("Finished stopping manager.") - - if reason: - # Raise an exception if a reason is provided - _LOGGER.debug("reason for closing: %s" % reason) - if isinstance(reason, Exception): - raise reason - raise RuntimeError(reason) - - def _on_rpc_done(self, future): - """Triggered whenever the underlying RPC terminates without recovery. - - This is typically triggered from one of two threads: the background - consumer thread (when calling ``recv()`` produces a non-recoverable - error) or the grpc management thread (when cancelling the RPC). - - This method is *non-blocking*. It will start another thread to deal - with shutting everything down. 
This is to prevent blocking in the - background consumer and preventing it from being ``joined()``. - """ - _LOGGER.info("RPC termination has signaled manager shutdown.") - future = _maybe_wrap_exception(future) - thread = threading.Thread( - name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future} - ) - thread.daemon = True - thread.start() - - def unsubscribe(self): - self.close() - - @classmethod - def for_document( - cls, - document_ref, - snapshot_callback, - snapshot_class_instance, - reference_class_instance, - ): - """ - Creates a watch snapshot listener for a document. snapshot_callback - receives a DocumentChange object, but may also start to get - targetChange and such soon - - Args: - document_ref: Reference to Document - snapshot_callback: callback to be called on snapshot - snapshot_class_instance: instance of DocumentSnapshot to make - snapshots with to pass to snapshot_callback - reference_class_instance: instance of DocumentReference to make - references - - """ - return cls( - document_ref, - document_ref._client, - { - "documents": {"documents": [document_ref._document_path]}, - "target_id": WATCH_TARGET_ID, - }, - document_watch_comparator, - snapshot_callback, - snapshot_class_instance, - reference_class_instance, - ) - - @classmethod - def for_query( - cls, query, snapshot_callback, snapshot_class_instance, reference_class_instance - ): - query_target = firestore.Target.QueryTarget( - parent=query._client._database_string, structured_query=query._to_protobuf() - ) - - return cls( - query, - query._client, - {"query": query_target, "target_id": WATCH_TARGET_ID}, - query._comparator, - snapshot_callback, - snapshot_class_instance, - reference_class_instance, - ) - - def _on_snapshot_target_change_no_change(self, proto): - _LOGGER.debug("on_snapshot: target change: NO_CHANGE") - change = proto.target_change - - no_target_ids = change.target_ids is None or len(change.target_ids) == 0 - if no_target_ids and change.read_time and self.current: - # TargetChange.TargetChangeType.CURRENT followed by - # TargetChange.TargetChangeType.NO_CHANGE - # signals a consistent state. Invoke the onSnapshot - # callback as specified by the user. - self.push(change.read_time, change.resume_token) - - def _on_snapshot_target_change_add(self, proto): - _LOGGER.debug("on_snapshot: target change: ADD") - target_id = proto.target_change.target_ids[0] - if target_id != WATCH_TARGET_ID: - raise RuntimeError("Unexpected target ID %s sent by server" % target_id) - - def _on_snapshot_target_change_remove(self, proto): - _LOGGER.debug("on_snapshot: target change: REMOVE") - change = proto.target_change - - code = 13 - message = "internal error" - if change.cause: - code = change.cause.code - message = change.cause.message - - message = "Error %s: %s" % (code, message) - - raise RuntimeError(message) - - def _on_snapshot_target_change_reset(self, proto): - # Whatever changes have happened so far no longer matter. - _LOGGER.debug("on_snapshot: target change: RESET") - self._reset_docs() - - def _on_snapshot_target_change_current(self, proto): - _LOGGER.debug("on_snapshot: target change: CURRENT") - self.current = True - - def on_snapshot(self, proto): - """ - Called everytime there is a response from listen. Collect changes - and 'push' the changes in a batch to the customer when we receive - 'current' from the listen response. 
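A rough usage sketch of the ``for_document`` factory above, assuming the v1beta1 ``Client`` and the ``DocumentReference``/``DocumentSnapshot`` classes from elsewhere in this package; the callback receives the ordered snapshots, the applied changes, and the read time, as described in ``__init__``.

    from google.cloud import firestore_v1beta1
    from google.cloud.firestore_v1beta1.document import DocumentReference, DocumentSnapshot
    from google.cloud.firestore_v1beta1.watch import Watch

    client = firestore_v1beta1.Client()  # placeholder project/credentials
    doc_ref = client.collection("chatrooms").document("my-chatroom")

    def callback(docs, changes, read_time):
        # docs: ordered DocumentSnapshots; changes: list of DocumentChange;
        # read_time: datetime of the consistent snapshot.
        for snapshot in docs:
            print(snapshot.reference._document_path)

    watch = Watch.for_document(doc_ref, callback, DocumentSnapshot, DocumentReference)
    # ... later, stop streaming and shut down the helper threads:
    watch.unsubscribe()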
- - Args: - listen_response(`google.cloud.firestore_v1beta1.types.ListenResponse`): - Callback method that receives a object to - """ - TargetChange = firestore.TargetChange - - target_changetype_dispatch = { - TargetChange.TargetChangeType.NO_CHANGE: self._on_snapshot_target_change_no_change, - TargetChange.TargetChangeType.ADD: self._on_snapshot_target_change_add, - TargetChange.TargetChangeType.REMOVE: self._on_snapshot_target_change_remove, - TargetChange.TargetChangeType.RESET: self._on_snapshot_target_change_reset, - TargetChange.TargetChangeType.CURRENT: self._on_snapshot_target_change_current, - } - - target_change = proto.target_change - if str(target_change): - target_change_type = target_change.target_change_type - _LOGGER.debug("on_snapshot: target change: " + str(target_change_type)) - meth = target_changetype_dispatch.get(target_change_type) - if meth is None: - _LOGGER.info( - "on_snapshot: Unknown target change " + str(target_change_type) - ) - self.close( - reason="Unknown target change type: %s " % str(target_change_type) - ) - else: - try: - meth(proto) - except Exception as exc2: - _LOGGER.debug("meth(proto) exc: " + str(exc2)) - raise - - # NOTE: - # in other implementations, such as node, the backoff is reset here - # in this version bidi rpc is just used and will control this. - - elif str(proto.document_change): - _LOGGER.debug("on_snapshot: document change") - - # No other target_ids can show up here, but we still need to see - # if the targetId was in the added list or removed list. - target_ids = proto.document_change.target_ids or [] - removed_target_ids = proto.document_change.removed_target_ids or [] - changed = False - removed = False - - if WATCH_TARGET_ID in target_ids: - changed = True - - if WATCH_TARGET_ID in removed_target_ids: - removed = True - - if changed: - _LOGGER.debug("on_snapshot: document change: CHANGED") - - # google.cloud.firestore_v1beta1.types.DocumentChange - document_change = proto.document_change - # google.cloud.firestore_v1beta1.types.Document - document = document_change.document - - data = _helpers.decode_dict(document.fields, self._firestore) - - # Create a snapshot. 
As Document and Query objects can be - # passed we need to get a Document Reference in a more manual - # fashion than self._document_reference - document_name = document.name - db_str = self._firestore._database_string - db_str_documents = db_str + "/documents/" - if document_name.startswith(db_str_documents): - document_name = document_name[len(db_str_documents) :] - - document_ref = self._firestore.document(document_name) - - snapshot = self.DocumentSnapshot( - reference=document_ref, - data=data, - exists=True, - read_time=None, - create_time=document.create_time, - update_time=document.update_time, - ) - self.change_map[document.name] = snapshot - - elif removed: - _LOGGER.debug("on_snapshot: document change: REMOVED") - document = proto.document_change.document - self.change_map[document.name] = ChangeType.REMOVED - - # NB: document_delete and document_remove (as far as we, the client, - # are concerned) are functionally equivalent - - elif str(proto.document_delete): - _LOGGER.debug("on_snapshot: document change: DELETE") - name = proto.document_delete.document - self.change_map[name] = ChangeType.REMOVED - - elif str(proto.document_remove): - _LOGGER.debug("on_snapshot: document change: REMOVE") - name = proto.document_remove.document - self.change_map[name] = ChangeType.REMOVED - - elif proto.filter: - _LOGGER.debug("on_snapshot: filter update") - if proto.filter.count != self._current_size(): - # We need to remove all the current results. - self._reset_docs() - # The filter didn't match, so re-issue the query. - # TODO: reset stream method? - # self._reset_stream(); - - else: - _LOGGER.debug("UNKNOWN TYPE. UHOH") - self.close(reason=ValueError("Unknown listen response type: %s" % proto)) - - def push(self, read_time, next_resume_token): - """ - Assembles a new snapshot from the current set of changes and invokes - the user's callback. Clears the current changes on completion. - """ - deletes, adds, updates = Watch._extract_changes( - self.doc_map, self.change_map, read_time - ) - - updated_tree, updated_map, appliedChanges = self._compute_snapshot( - self.doc_tree, self.doc_map, deletes, adds, updates - ) - - if not self.has_pushed or len(appliedChanges): - # TODO: It is possible in the future we will have the tree order - # on insert. For now, we sort here. - key = functools.cmp_to_key(self._comparator) - keys = sorted(updated_tree.keys(), key=key) - - self._snapshot_callback( - keys, - appliedChanges, - datetime.datetime.fromtimestamp(read_time.seconds, pytz.utc), - ) - self.has_pushed = True - - self.doc_tree = updated_tree - self.doc_map = updated_map - self.change_map.clear() - self.resume_token = next_resume_token - - @staticmethod - def _extract_changes(doc_map, changes, read_time): - deletes = [] - adds = [] - updates = [] - - for name, value in changes.items(): - if value == ChangeType.REMOVED: - if name in doc_map: - deletes.append(name) - elif name in doc_map: - if read_time is not None: - value.read_time = read_time - updates.append(value) - else: - if read_time is not None: - value.read_time = read_time - adds.append(value) - - return (deletes, adds, updates) - - def _compute_snapshot( - self, doc_tree, doc_map, delete_changes, add_changes, update_changes - ): - updated_tree = doc_tree - updated_map = doc_map - - assert len(doc_tree) == len(doc_map), ( - "The document tree and document map should have the same " - + "number of entries." - ) - - def delete_doc(name, updated_tree, updated_map): - """ - Applies a document delete to the document tree and document map. 
- Returns the corresponding DocumentChange event. - """ - assert name in updated_map, "Document to delete does not exist" - old_document = updated_map.get(name) - # TODO: If a document doesn't exist this raises IndexError. Handle? - existing = updated_tree.find(old_document) - old_index = existing.index - updated_tree = updated_tree.remove(old_document) - del updated_map[name] - return ( - DocumentChange(ChangeType.REMOVED, old_document, old_index, -1), - updated_tree, - updated_map, - ) - - def add_doc(new_document, updated_tree, updated_map): - """ - Applies a document add to the document tree and the document map. - Returns the corresponding DocumentChange event. - """ - name = new_document.reference._document_path - assert name not in updated_map, "Document to add already exists" - updated_tree = updated_tree.insert(new_document, None) - new_index = updated_tree.find(new_document).index - updated_map[name] = new_document - return ( - DocumentChange(ChangeType.ADDED, new_document, -1, new_index), - updated_tree, - updated_map, - ) - - def modify_doc(new_document, updated_tree, updated_map): - """ - Applies a document modification to the document tree and the - document map. - Returns the DocumentChange event for successful modifications. - """ - name = new_document.reference._document_path - assert name in updated_map, "Document to modify does not exist" - old_document = updated_map.get(name) - if old_document.update_time != new_document.update_time: - remove_change, updated_tree, updated_map = delete_doc( - name, updated_tree, updated_map - ) - add_change, updated_tree, updated_map = add_doc( - new_document, updated_tree, updated_map - ) - return ( - DocumentChange( - ChangeType.MODIFIED, - new_document, - remove_change.old_index, - add_change.new_index, - ), - updated_tree, - updated_map, - ) - - return None, updated_tree, updated_map - - # Process the sorted changes in the order that is expected by our - # clients (removals, additions, and then modifications). We also need - # to sort the individual changes to assure that old_index/new_index - # keep incrementing. - appliedChanges = [] - - key = functools.cmp_to_key(self._comparator) - - # Deletes are sorted based on the order of the existing document. - delete_changes = sorted(delete_changes, key=key) - for name in delete_changes: - change, updated_tree, updated_map = delete_doc( - name, updated_tree, updated_map - ) - appliedChanges.append(change) - - add_changes = sorted(add_changes, key=key) - _LOGGER.debug("walk over add_changes") - for snapshot in add_changes: - _LOGGER.debug("in add_changes") - change, updated_tree, updated_map = add_doc( - snapshot, updated_tree, updated_map - ) - appliedChanges.append(change) - - update_changes = sorted(update_changes, key=key) - for snapshot in update_changes: - change, updated_tree, updated_map = modify_doc( - snapshot, updated_tree, updated_map - ) - if change is not None: - appliedChanges.append(change) - - assert len(updated_tree) == len(updated_map), ( - "The update document " - + "tree and document map should have the same number of entries." - ) - return (updated_tree, updated_map, appliedChanges) - - def _affects_target(self, target_ids, current_id): - if target_ids is None: - return True - - return current_id in target_ids - - def _current_size(self): - """ - Returns the current count of all documents, including the changes from - the current changeMap. 
- """ - deletes, adds, _ = Watch._extract_changes(self.doc_map, self.change_map, None) - return len(self.doc_map) + len(adds) - len(deletes) - - def _reset_docs(self): - """ - Helper to clear the docs on RESET or filter mismatch. - """ - _LOGGER.debug("resetting documents") - self.change_map.clear() - self.resume_token = None - - # Mark each document as deleted. If documents are not deleted - # they will be sent again by the server. - for snapshot in self.doc_tree.keys(): - name = snapshot.reference._document_path - self.change_map[name] = ChangeType.REMOVED - - self.current = False diff --git a/tests/unit/gapic/firestore_v1beta1/test_firestore_v1beta1.py b/tests/unit/gapic/firestore_v1beta1/test_firestore_v1beta1.py deleted file mode 100644 index 350879528..000000000 --- a/tests/unit/gapic/firestore_v1beta1/test_firestore_v1beta1.py +++ /dev/null @@ -1,2632 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import os -import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - -from google import auth -from google.api_core import client_options -from google.api_core import exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.firestore_v1beta1.services.firestore import FirestoreAsyncClient -from google.cloud.firestore_v1beta1.services.firestore import FirestoreClient -from google.cloud.firestore_v1beta1.services.firestore import pagers -from google.cloud.firestore_v1beta1.services.firestore import transports -from google.cloud.firestore_v1beta1.types import common -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import document as gf_document -from google.cloud.firestore_v1beta1.types import firestore -from google.cloud.firestore_v1beta1.types import query -from google.cloud.firestore_v1beta1.types import write -from google.cloud.firestore_v1beta1.types import write as gf_write -from google.oauth2 import service_account -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore -from google.type import latlng_pb2 as latlng # type: ignore - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert FirestoreClient._get_default_mtls_endpoint(None) is None - assert FirestoreClient._get_default_mtls_endpoint(api_endpoint) == 
api_mtls_endpoint - assert ( - FirestoreClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - FirestoreClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - FirestoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert FirestoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [FirestoreClient, FirestoreAsyncClient]) -def test_firestore_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds - - assert client._transport._host == "firestore.googleapis.com:443" - - -def test_firestore_client_get_transport_class(): - transport = FirestoreClient.get_transport_class() - assert transport == transports.FirestoreGrpcTransport - - transport = FirestoreClient.get_transport_class("grpc") - assert transport == transports.FirestoreGrpcTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) -def test_firestore_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(FirestoreClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(FirestoreClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "never". - os.environ["GOOGLE_API_USE_MTLS"] = "never" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "always". 
- os.environ["GOOGLE_API_USE_MTLS"] = "always" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" - with pytest.raises(MutualTLSChannelError): - client = client_class() - - del os.environ["GOOGLE_API_USE_MTLS"] - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) -def test_firestore_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (FirestoreClient, transports.FirestoreGrpcTransport, "grpc"), - ( - FirestoreAsyncClient, - transports.FirestoreGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) -def test_firestore_client_client_options_credentials_file( - client_class, transport_class, transport_name -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - ) - - -def test_firestore_client_client_options_from_dict(): - with mock.patch( - "google.cloud.firestore_v1beta1.services.firestore.transports.FirestoreGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = FirestoreClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, - ) - - -def test_get_document(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.GetDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = document.Document(name="name_value",) - - response = client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_get_document_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.GetDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document(name="name_value",) - ) - - response = await client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - - assert response.name == "name_value" - - -def test_get_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.GetDocumentRequest() - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_document), "__call__") as call: - call.return_value = document.Document() - - client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.GetDocumentRequest() - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_document), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - - await client.get_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -def test_list_documents(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.ListDocumentsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_documents), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListDocumentsResponse( - next_page_token="next_page_token_value", - ) - - response = client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_documents_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.ListDocumentsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListDocumentsResponse(next_page_token="next_page_token_value",) - ) - - response = await client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDocumentsAsyncPager) - - assert response.next_page_token == "next_page_token_value" - - -def test_list_documents_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListDocumentsRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_documents), "__call__") as call: - call.return_value = firestore.ListDocumentsResponse() - - client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_documents_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListDocumentsRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_documents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListDocumentsResponse() - ) - - await client.list_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_list_documents_pager(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_documents), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse(documents=[], next_page_token="def",), - firestore.ListDocumentsResponse( - documents=[document.Document(),], next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[document.Document(), document.Document(),], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_documents(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - - -def test_list_documents_pages(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials,) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_documents), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse(documents=[], next_page_token="def",), - firestore.ListDocumentsResponse( - documents=[document.Document(),], next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[document.Document(), document.Document(),], - ), - RuntimeError, - ) - pages = list(client.list_documents(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_documents_async_pager(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_documents), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse(documents=[], next_page_token="def",), - firestore.ListDocumentsResponse( - documents=[document.Document(),], next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[document.Document(), document.Document(),], - ), - RuntimeError, - ) - async_pager = await client.list_documents(request={},) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, document.Document) for i in responses) - - -@pytest.mark.asyncio -async def test_list_documents_async_pages(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials,) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_documents), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - firestore.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - firestore.ListDocumentsResponse(documents=[], next_page_token="def",), - firestore.ListDocumentsResponse( - documents=[document.Document(),], next_page_token="ghi", - ), - firestore.ListDocumentsResponse( - documents=[document.Document(), document.Document(),], - ), - RuntimeError, - ) - pages = [] - async for page in (await client.list_documents(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token - - -def test_create_document(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.CreateDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = document.Document(name="name_value",) - - response = client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_create_document_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.CreateDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - document.Document(name="name_value",) - ) - - response = await client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, document.Document) - - assert response.name == "name_value" - - -def test_create_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CreateDocumentRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_document), "__call__") as call: - call.return_value = document.Document() - - client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CreateDocumentRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_document), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(document.Document()) - - await client.create_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_update_document(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.UpdateDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = gf_document.Document(name="name_value",) - - response = client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gf_document.Document) - - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_update_document_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.UpdateDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document(name="name_value",) - ) - - response = await client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gf_document.Document) - - assert response.name == "name_value" - - -def test_update_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = firestore.UpdateDocumentRequest() - request.document.name = "document.name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_document), "__call__") as call: - call.return_value = gf_document.Document() - - client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "document.name=document.name/value",) in kw[ - "metadata" - ] - - -@pytest.mark.asyncio -async def test_update_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.UpdateDocumentRequest() - request.document.name = "document.name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_document), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document() - ) - - await client.update_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "document.name=document.name/value",) in kw[ - "metadata" - ] - - -def test_update_document_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = gf_document.Document() - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_document( - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0].document == gf_document.Document(name="name_value") - - assert args[0].update_mask == common.DocumentMask( - field_paths=["field_paths_value"] - ) - - -def test_update_document_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_document_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = gf_document.Document() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gf_document.Document() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_document( - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0].document == gf_document.Document(name="name_value") - - assert args[0].update_mask == common.DocumentMask( - field_paths=["field_paths_value"] - ) - - -@pytest.mark.asyncio -async def test_update_document_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_document( - firestore.UpdateDocumentRequest(), - document=gf_document.Document(name="name_value"), - update_mask=common.DocumentMask(field_paths=["field_paths_value"]), - ) - - -def test_delete_document(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.DeleteDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_document_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.DeleteDocumentRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - - response = await client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_document_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.DeleteDocumentRequest() - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_document), "__call__") as call: - call.return_value = None - - client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_document_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.DeleteDocumentRequest() - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_document), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - - await client.delete_document(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -def test_delete_document_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_document), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_document(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0].name == "name_value" - - -def test_delete_document_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - firestore.DeleteDocumentRequest(), name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_document_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_document), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_document(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0].name == "name_value" - - -@pytest.mark.asyncio -async def test_delete_document_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_document( - firestore.DeleteDocumentRequest(), name="name_value", - ) - - -def test_batch_get_documents(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.BatchGetDocumentsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.batch_get_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - - response = client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.BatchGetDocumentsResponse) - - -@pytest.mark.asyncio -async def test_batch_get_documents_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.BatchGetDocumentsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.batch_get_documents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.BatchGetDocumentsResponse()] - ) - - response = await client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.BatchGetDocumentsResponse) - - -def test_batch_get_documents_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BatchGetDocumentsRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.batch_get_documents), "__call__" - ) as call: - call.return_value = iter([firestore.BatchGetDocumentsResponse()]) - - client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_batch_get_documents_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BatchGetDocumentsRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.batch_get_documents), "__call__" - ) as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.BatchGetDocumentsResponse()] - ) - - await client.batch_get_documents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -def test_begin_transaction(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.BeginTransactionRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse( - transaction=b"transaction_blob", - ) - - response = client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.BeginTransactionResponse) - - assert response.transaction == b"transaction_blob" - - -@pytest.mark.asyncio -async def test_begin_transaction_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.BeginTransactionRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse(transaction=b"transaction_blob",) - ) - - response = await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, firestore.BeginTransactionResponse) - - assert response.transaction == b"transaction_blob" - - -def test_begin_transaction_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BeginTransactionRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.begin_transaction), "__call__" - ) as call: - call.return_value = firestore.BeginTransactionResponse() - - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_begin_transaction_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.BeginTransactionRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.begin_transaction), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse() - ) - - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -def test_begin_transaction_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse() - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.begin_transaction(database="database_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0].database == "database_value" - - -def test_begin_transaction_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - firestore.BeginTransactionRequest(), database="database_value", - ) - - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.begin_transaction), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.BeginTransactionResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.BeginTransactionResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.begin_transaction(database="database_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0].database == "database_value" - - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.begin_transaction( - firestore.BeginTransactionRequest(), database="database_value", - ) - - -def test_commit(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.CommitRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - - response = client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.CommitResponse) - - -@pytest.mark.asyncio -async def test_commit_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.CommitRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - - response = await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.CommitResponse) - - -def test_commit_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CommitRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.commit), "__call__") as call: - call.return_value = firestore.CommitResponse() - - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_commit_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.CommitRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.commit), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -def test_commit_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.commit( - database="database_value", - writes=[gf_write.Write(update=gf_document.Document(name="name_value"))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0].database == "database_value" - - assert args[0].writes == [ - gf_write.Write(update=gf_document.Document(name="name_value")) - ] - - -def test_commit_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - firestore.CommitRequest(), - database="database_value", - writes=[gf_write.Write(update=gf_document.Document(name="name_value"))], - ) - - -@pytest.mark.asyncio -async def test_commit_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.commit), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.CommitResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.CommitResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.commit( - database="database_value", - writes=[gf_write.Write(update=gf_document.Document(name="name_value"))], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0].database == "database_value" - - assert args[0].writes == [ - gf_write.Write(update=gf_document.Document(name="name_value")) - ] - - -@pytest.mark.asyncio -async def test_commit_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.commit( - firestore.CommitRequest(), - database="database_value", - writes=[gf_write.Write(update=gf_document.Document(name="name_value"))], - ) - - -def test_rollback(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.RollbackRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_rollback_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.RollbackRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.rollback), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - - response = await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_rollback_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RollbackRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.rollback), "__call__") as call: - call.return_value = None - - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_rollback_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RollbackRequest() - request.database = "database/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.rollback), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "database=database/value",) in kw["metadata"] - - -def test_rollback_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.rollback), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rollback( - database="database_value", transaction=b"transaction_blob", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0].database == "database_value" - - assert args[0].transaction == b"transaction_blob" - - -def test_rollback_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - firestore.RollbackRequest(), - database="database_value", - transaction=b"transaction_blob", - ) - - -@pytest.mark.asyncio -async def test_rollback_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.rollback), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rollback( - database="database_value", transaction=b"transaction_blob", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0].database == "database_value" - - assert args[0].transaction == b"transaction_blob" - - -@pytest.mark.asyncio -async def test_rollback_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.rollback( - firestore.RollbackRequest(), - database="database_value", - transaction=b"transaction_blob", - ) - - -def test_run_query(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.RunQueryRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.run_query), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.RunQueryResponse()]) - - response = client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.RunQueryResponse) - - -@pytest.mark.asyncio -async def test_run_query_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.RunQueryRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.run_query), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunQueryResponse()] - ) - - response = await client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.RunQueryResponse) - - -def test_run_query_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RunQueryRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.run_query), "__call__") as call: - call.return_value = iter([firestore.RunQueryResponse()]) - - client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_run_query_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.RunQueryRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.run_query), "__call__" - ) as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.RunQueryResponse()] - ) - - await client.run_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_write(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.WriteRequest() - - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.WriteResponse()]) - - response = client.write(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.WriteResponse) - - -@pytest.mark.asyncio -async def test_write_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.WriteRequest() - - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.write), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[firestore.WriteResponse()]) - - response = await client.write(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.WriteResponse) - - -def test_listen(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.ListenRequest() - - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.listen), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = iter([firestore.ListenResponse()]) - - response = client.listen(iter(requests)) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, firestore.ListenResponse) - - -@pytest.mark.asyncio -async def test_listen_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.ListenRequest() - - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.listen), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock( - side_effect=[firestore.ListenResponse()] - ) - - response = await client.listen(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, firestore.ListenResponse) - - -def test_list_collection_ids(transport: str = "grpc"): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.ListCollectionIdsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - - response = client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.ListCollectionIdsResponse) - - assert response.collection_ids == ["collection_ids_value"] - - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_collection_ids_async(transport: str = "grpc_asyncio"): - client = FirestoreAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = firestore.ListCollectionIdsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse( - collection_ids=["collection_ids_value"], - next_page_token="next_page_token_value", - ) - ) - - response = await client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, firestore.ListCollectionIdsResponse) - - assert response.collection_ids == ["collection_ids_value"] - - assert response.next_page_token == "next_page_token_value" - - -def test_list_collection_ids_field_headers(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListCollectionIdsRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_collection_ids), "__call__" - ) as call: - call.return_value = firestore.ListCollectionIdsResponse() - - client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_collection_ids_field_headers_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = firestore.ListCollectionIdsRequest() - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_collection_ids), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse() - ) - - await client.list_collection_ids(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_list_collection_ids_flattened(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse() - - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_collection_ids(parent="parent_value",) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0].parent == "parent_value" - - -def test_list_collection_ids_flattened_error(): - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_collection_ids( - firestore.ListCollectionIdsRequest(), parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_collection_ids_flattened_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_collection_ids), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = firestore.ListCollectionIdsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - firestore.ListCollectionIdsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_collection_ids(parent="parent_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0].parent == "parent_value" - - -@pytest.mark.asyncio -async def test_list_collection_ids_flattened_error_async(): - client = FirestoreAsyncClient(credentials=credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_collection_ids( - firestore.ListCollectionIdsRequest(), parent="parent_value", - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FirestoreClient( - client_options={"scopes": ["1", "2"]}, transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - client = FirestoreClient(transport=transport) - assert client._transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.FirestoreGrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.FirestoreGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = FirestoreClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.FirestoreGrpcTransport,) - - -def test_firestore_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): - transport = transports.FirestoreTransport( - credentials=credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_firestore_base_transport(): - # Instantiate the base transport. - transport = transports.FirestoreTransport( - credentials=credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "get_document", - "list_documents", - "create_document", - "update_document", - "delete_document", - "batch_get_documents", - "begin_transaction", - "commit", - "rollback", - "run_query", - "write", - "listen", - "list_collection_ids", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -def test_firestore_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(auth, "load_credentials_from_file") as load_creds: - load_creds.return_value = (credentials.AnonymousCredentials(), None) - transport = transports.FirestoreTransport(credentials_file="credentials.json",) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - ) - - -def test_firestore_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - FirestoreClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - ) - - -def test_firestore_transport_auth_adc(): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.FirestoreGrpcTransport(host="squid.clam.whelk") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ) - ) - - -def test_firestore_host_no_port(): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="firestore.googleapis.com" - ), - ) - assert client._transport._host == "firestore.googleapis.com:443" - - -def test_firestore_host_with_port(): - client = FirestoreClient( - credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="firestore.googleapis.com:8000" - ), - ) - assert client._transport._host == "firestore.googleapis.com:8000" - - -def test_firestore_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") - - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() - transport = transports.FirestoreGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -def test_firestore_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") - - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() - transport = transports.FirestoreGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_firestore_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.FirestoreGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - ssl_credentials=mock_ssl_cred, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_firestore_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. - mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.FirestoreGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - ssl_credentials=mock_ssl_cred, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_firestore_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.FirestoreGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - ssl_credentials=mock_ssl_cred, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_firestore_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.FirestoreGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/datastore", - ), - ssl_credentials=mock_ssl_cred, - ) - assert transport.grpc_channel == mock_grpc_channel diff --git a/tests/unit/v1beta1/__init__.py b/tests/unit/v1beta1/__init__.py deleted file mode 100644 index ab6729095..000000000 --- a/tests/unit/v1beta1/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/tests/unit/v1beta1/_test_cross_language.py b/tests/unit/v1beta1/_test_cross_language.py deleted file mode 100644 index 560a9ae93..000000000 --- a/tests/unit/v1beta1/_test_cross_language.py +++ /dev/null @@ -1,503 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import functools -import glob -import json -import os - -import mock -import pytest - -from google.protobuf import text_format -from google.cloud.firestore_v1beta1.types import document -from google.cloud.firestore_v1beta1.types import firestore -from google.cloud.firestore_v1beta1.proto import test_v1beta1_pb2 -from google.cloud.firestore_v1beta1.types import write - - -def _load_testproto(filename): - with open(filename, "r") as tp_file: - tp_text = tp_file.read() - test_proto = test_v1beta1_pb2.Test() - text_format.Merge(tp_text, test_proto) - shortname = os.path.split(filename)[-1] - test_proto.description = test_proto.description + " (%s)" % shortname - return test_proto - - -_here = os.path.dirname(__file__) -_glob_expr = "{}/testdata/*.textproto".format(_here) -_globs = glob.glob(_glob_expr) -ALL_TESTPROTOS = [_load_testproto(filename) for filename in sorted(_globs)] - -_CREATE_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "create" -] - -_GET_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "get" -] - -_SET_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "set" -] - -_UPDATE_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "update" -] - -_UPDATE_PATHS_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "update_paths" -] - -_DELETE_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "delete" -] - -_LISTEN_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "listen" -] - -_QUERY_TESTPROTOS = [ - test_proto - for test_proto in ALL_TESTPROTOS - if test_proto.WhichOneof("test") == "query" -] - - -def _mock_firestore_api(): - firestore_api = mock.Mock(spec=["commit"]) - commit_response = firestore.CommitResponse(write_results=[write.WriteResult()]) - firestore_api.commit.return_value = commit_response - return firestore_api - - -def _make_client_document(firestore_api, testcase): - from google.cloud.firestore_v1beta1 import Client - from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE - import google.auth.credentials - - _, project, _, database, _, doc_path = testcase.doc_ref_path.split("/", 5) - assert database == DEFAULT_DATABASE - - # Attach the fake GAPIC to a real client. - credentials = mock.Mock(spec=google.auth.credentials.Credentials) - - with pytest.deprecated_call(): - client = Client(project=project, credentials=credentials) - - client._firestore_api_internal = firestore_api - return client, client.document(doc_path) - - -def _run_testcase(testcase, call, firestore_api, client): - if getattr(testcase, "is_error", False): - # TODO: is there a subclass of Exception we can check for? 
-        with pytest.raises(Exception):
-            call()
-    else:
-        call()
-        firestore_api.commit.assert_called_once_with(
-            client._database_string,
-            list(testcase.request.writes),
-            transaction=None,
-            metadata=client._rpc_metadata,
-        )
-
-
-@pytest.mark.parametrize("test_proto", _CREATE_TESTPROTOS)
-def test_create_testprotos(test_proto):
-    testcase = test_proto.create
-    firestore_api = _mock_firestore_api()
-    client, document = _make_client_document(firestore_api, testcase)
-    data = convert_data(json.loads(testcase.json_data))
-    call = functools.partial(document.create, data)
-    _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.parametrize("test_proto", _GET_TESTPROTOS)
-def test_get_testprotos(test_proto):
-    testcase = test_proto.get
-    firestore_api = mock.Mock(spec=["get_document"])
-    response = document.Document()
-    firestore_api.get_document.return_value = response
-    client, doc = _make_client_document(firestore_api, testcase)
-
-    doc.get()  # No '.textprotos' for errors, field_paths.
-
-    firestore_api.get_document.assert_called_once_with(
-        document._document_path,
-        mask=None,
-        transaction=None,
-        metadata=client._rpc_metadata,
-    )
-
-
-@pytest.mark.parametrize("test_proto", _SET_TESTPROTOS)
-def test_set_testprotos(test_proto):
-    testcase = test_proto.set
-    firestore_api = _mock_firestore_api()
-    client, document = _make_client_document(firestore_api, testcase)
-    data = convert_data(json.loads(testcase.json_data))
-    if testcase.HasField("option"):
-        merge = convert_set_option(testcase.option)
-    else:
-        merge = False
-    call = functools.partial(document.set, data, merge=merge)
-    _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.parametrize("test_proto", _UPDATE_TESTPROTOS)
-def test_update_testprotos(test_proto):
-    testcase = test_proto.update
-    firestore_api = _mock_firestore_api()
-    client, document = _make_client_document(firestore_api, testcase)
-    data = convert_data(json.loads(testcase.json_data))
-    if testcase.HasField("precondition"):
-        option = convert_precondition(testcase.precondition)
-    else:
-        option = None
-    call = functools.partial(document.update, data, option)
-    _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.skip(reason="Python has no way to call update with a list of field paths.")
-@pytest.mark.parametrize("test_proto", _UPDATE_PATHS_TESTPROTOS)
-def test_update_paths_testprotos(test_proto):  # pragma: NO COVER
-    pass
-
-
-@pytest.mark.parametrize("test_proto", _DELETE_TESTPROTOS)
-def test_delete_testprotos(test_proto):
-    testcase = test_proto.delete
-    firestore_api = _mock_firestore_api()
-    client, document = _make_client_document(firestore_api, testcase)
-    if testcase.HasField("precondition"):
-        option = convert_precondition(testcase.precondition)
-    else:
-        option = None
-    call = functools.partial(document.delete, option)
-    _run_testcase(testcase, call, firestore_api, client)
-
-
-@pytest.mark.parametrize("test_proto", _LISTEN_TESTPROTOS)
-def test_listen_testprotos(test_proto):  # pragma: NO COVER
-    # test_proto.listen has 'responses' messages,
-    # 'google.cloud.firestore.v1beta1.ListenResponse'
-    # and then an expected list of 'snapshots' (local 'Snapshot'), containing
-    # 'docs' (list of 'google.cloud.firestore.v1beta1.Document'),
-    # 'changes' (list of local 'DocChange'), and 'read_time' timestamp.
- from google.cloud.firestore_v1beta1 import Client - from google.cloud.firestore_v1beta1 import DocumentReference - from google.cloud.firestore_v1beta1 import DocumentSnapshot - from google.cloud.firestore_v1beta1 import Watch - import google.auth.credentials - - testcase = test_proto.listen - testname = test_proto.description - - credentials = mock.Mock(spec=google.auth.credentials.Credentials) - - with pytest.deprecated_call(): - client = Client(project="project", credentials=credentials) - - modulename = "google.cloud.firestore_v1beta1.watch" - with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc): - with mock.patch( - "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer - ): - with mock.patch( # conformance data sets WATCH_TARGET_ID to 1 - "%s.WATCH_TARGET_ID" % modulename, 1 - ): - snapshots = [] - - def callback(keys, applied_changes, read_time): - snapshots.append((keys, applied_changes, read_time)) - - query = DummyQuery(client=client) - watch = Watch.for_query( - query, callback, DocumentSnapshot, DocumentReference - ) - # conformance data has db string as this - db_str = "projects/projectID/databases/(default)" - watch._firestore._database_string_internal = db_str - - if testcase.is_error: - try: - for proto in testcase.responses: - watch.on_snapshot(proto) - except RuntimeError: - # listen-target-add-wrong-id.textpro - # listen-target-remove.textpro - pass - - else: - for proto in testcase.responses: - watch.on_snapshot(proto) - - assert len(snapshots) == len(testcase.snapshots) - for i, (expected_snapshot, actual_snapshot) in enumerate( - zip(testcase.snapshots, snapshots) - ): - expected_changes = expected_snapshot.changes - actual_changes = actual_snapshot[1] - if len(expected_changes) != len(actual_changes): - raise AssertionError( - "change length mismatch in %s (snapshot #%s)" - % (testname, i) - ) - for y, (expected_change, actual_change) in enumerate( - zip(expected_changes, actual_changes) - ): - expected_change_kind = expected_change.kind - actual_change_kind = actual_change.type.value - if expected_change_kind != actual_change_kind: - raise AssertionError( - "change type mismatch in %s (snapshot #%s, change #%s')" - % (testname, i, y) - ) - - -@pytest.mark.parametrize("test_proto", _QUERY_TESTPROTOS) -def test_query_testprotos(test_proto): # pragma: NO COVER - testcase = test_proto.query - if testcase.is_error: - with pytest.raises(Exception): - query = parse_query(testcase) - query._to_protobuf() - else: - query = parse_query(testcase) - found = query._to_protobuf() - assert found == testcase.query - - -def convert_data(v): - # Replace the strings 'ServerTimestamp' and 'Delete' with the corresponding - # sentinels. 
- from google.cloud.firestore_v1beta1 import ArrayRemove - from google.cloud.firestore_v1beta1 import ArrayUnion - from google.cloud.firestore_v1beta1 import DELETE_FIELD - from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP - - if v == "ServerTimestamp": - return SERVER_TIMESTAMP - elif v == "Delete": - return DELETE_FIELD - elif isinstance(v, list): - if v[0] == "ArrayRemove": - return ArrayRemove([convert_data(e) for e in v[1:]]) - if v[0] == "ArrayUnion": - return ArrayUnion([convert_data(e) for e in v[1:]]) - return [convert_data(e) for e in v] - elif isinstance(v, dict): - return {k: convert_data(v2) for k, v2 in v.items()} - elif v == "NaN": - return float(v) - else: - return v - - -def convert_set_option(option): - from google.cloud.firestore_v1beta1 import _helpers - - if option.fields: - return [ - _helpers.FieldPath(*field.field).to_api_repr() for field in option.fields - ] - - assert option.all - return True - - -def convert_precondition(precond): - from google.cloud.firestore_v1beta1 import Client - - if precond.HasField("exists"): - return Client.write_option(exists=precond.exists) - - assert precond.HasField("update_time") - return Client.write_option(last_update_time=precond.update_time) - - -class DummyRpc(object): # pragma: NO COVER - def __init__(self, listen, initial_request, should_recover, metadata=None): - self.listen = listen - self.initial_request = initial_request - self.should_recover = should_recover - self.closed = False - self.callbacks = [] - self._metadata = metadata - - def add_done_callback(self, callback): - self.callbacks.append(callback) - - def close(self): - self.closed = True - - -class DummyBackgroundConsumer(object): # pragma: NO COVER - started = False - stopped = False - is_active = True - - def __init__(self, rpc, on_snapshot): - self._rpc = rpc - self.on_snapshot = on_snapshot - - def start(self): - self.started = True - - def stop(self): - self.stopped = True - self.is_active = False - - -class DummyQuery(object): # pragma: NO COVER - def __init__(self, **kw): - self._client = kw["client"] - self._comparator = lambda x, y: 1 - - def _to_protobuf(self): - from google.cloud.firestore_v1beta1.types import query - - query_kwargs = { - "select": None, - "from": None, - "where": None, - "order_by": None, - "start_at": None, - "end_at": None, - } - return query.StructuredQuery(**query_kwargs) - - -def parse_query(testcase): - # 'query' testcase contains: - # - 'coll_path': collection ref path. - # - 'clauses': array of one or more 'Clause' elements - # - 'query': the actual google.cloud.firestore.v1beta1.StructuredQuery message - # to be constructed. - # - 'is_error' (as other testcases). 
- # - # 'Clause' elements are unions of: - # - 'select': [field paths] - # - 'where': (field_path, op, json_value) - # - 'order_by': (field_path, direction) - # - 'offset': int - # - 'limit': int - # - 'start_at': 'Cursor' - # - 'start_after': 'Cursor' - # - 'end_at': 'Cursor' - # - 'end_before': 'Cursor' - # - # 'Cursor' contains either: - # - 'doc_snapshot': 'DocSnapshot' - # - 'json_values': [string] - # - # 'DocSnapshot' contains: - # 'path': str - # 'json_data': str - from google.auth.credentials import Credentials - from google.cloud.firestore_v1beta1 import Client - from google.cloud.firestore_v1beta1 import Query - - _directions = {"asc": Query.ASCENDING, "desc": Query.DESCENDING} - - credentials = mock.create_autospec(Credentials) - - with pytest.deprecated_call(): - client = Client("projectID", credentials) - - path = parse_path(testcase.coll_path) - collection = client.collection(*path) - query = collection - - for clause in testcase.clauses: - kind = clause.WhichOneof("clause") - - if kind == "select": - field_paths = [ - ".".join(field_path.field) for field_path in clause.select.fields - ] - query = query.select(field_paths) - elif kind == "where": - path = ".".join(clause.where.path.field) - value = convert_data(json.loads(clause.where.json_value)) - query = query.where(path, clause.where.op, value) - elif kind == "order_by": - path = ".".join(clause.order_by.path.field) - direction = clause.order_by.direction - direction = _directions.get(direction, direction) - query = query.order_by(path, direction=direction) - elif kind == "offset": - query = query.offset(clause.offset) - elif kind == "limit": - query = query.limit(clause.limit) - elif kind == "start_at": - cursor = parse_cursor(clause.start_at, client) - query = query.start_at(cursor) - elif kind == "start_after": - cursor = parse_cursor(clause.start_after, client) - query = query.start_after(cursor) - elif kind == "end_at": - cursor = parse_cursor(clause.end_at, client) - query = query.end_at(cursor) - elif kind == "end_before": - cursor = parse_cursor(clause.end_before, client) - query = query.end_before(cursor) - else: # pragma: NO COVER - raise ValueError("Unknown query clause: {}".format(kind)) - - return query - - -def parse_path(path): - _, relative = path.split("documents/") - return relative.split("/") - - -def parse_cursor(cursor, client): - from google.cloud.firestore_v1beta1 import DocumentReference - from google.cloud.firestore_v1beta1 import DocumentSnapshot - - if cursor.HasField("doc_snapshot"): - path = parse_path(cursor.doc_snapshot.path) - doc_ref = DocumentReference(*path, client=client) - - return DocumentSnapshot( - reference=doc_ref, - data=json.loads(cursor.doc_snapshot.json_data), - exists=True, - read_time=None, - create_time=None, - update_time=None, - ) - - values = [json.loads(value) for value in cursor.json_values] - return convert_data(values) diff --git a/tests/unit/v1beta1/test__helpers.py b/tests/unit/v1beta1/test__helpers.py deleted file mode 100644 index 5f0743854..000000000 --- a/tests/unit/v1beta1/test__helpers.py +++ /dev/null @@ -1,2087 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import sys -import unittest - -import mock -import pytest - - -class TestGeoPoint(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1._helpers import GeoPoint - - return GeoPoint - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor(self): - lat = 81.25 - lng = 359.984375 - geo_pt = self._make_one(lat, lng) - self.assertEqual(geo_pt.latitude, lat) - self.assertEqual(geo_pt.longitude, lng) - - def test_to_protobuf(self): - from google.type import latlng_pb2 - - lat = 0.015625 - lng = 20.03125 - geo_pt = self._make_one(lat, lng) - result = geo_pt.to_protobuf() - geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng) - self.assertEqual(result, geo_pt_pb) - - def test___eq__(self): - lat = 0.015625 - lng = 20.03125 - geo_pt1 = self._make_one(lat, lng) - geo_pt2 = self._make_one(lat, lng) - self.assertEqual(geo_pt1, geo_pt2) - - def test___eq__type_differ(self): - lat = 0.015625 - lng = 20.03125 - geo_pt1 = self._make_one(lat, lng) - geo_pt2 = object() - self.assertNotEqual(geo_pt1, geo_pt2) - self.assertIs(geo_pt1.__eq__(geo_pt2), NotImplemented) - - def test___ne__same_value(self): - lat = 0.015625 - lng = 20.03125 - geo_pt1 = self._make_one(lat, lng) - geo_pt2 = self._make_one(lat, lng) - comparison_val = geo_pt1 != geo_pt2 - self.assertFalse(comparison_val) - - def test___ne__(self): - geo_pt1 = self._make_one(0.0, 1.0) - geo_pt2 = self._make_one(2.0, 3.0) - self.assertNotEqual(geo_pt1, geo_pt2) - - def test___ne__type_differ(self): - lat = 0.015625 - lng = 20.03125 - geo_pt1 = self._make_one(lat, lng) - geo_pt2 = object() - self.assertNotEqual(geo_pt1, geo_pt2) - self.assertIs(geo_pt1.__ne__(geo_pt2), NotImplemented) - - -class Test_verify_path(unittest.TestCase): - @staticmethod - def _call_fut(path, is_collection): - from google.cloud.firestore_v1beta1._helpers import verify_path - - return verify_path(path, is_collection) - - def test_empty(self): - path = () - with self.assertRaises(ValueError): - self._call_fut(path, True) - with self.assertRaises(ValueError): - self._call_fut(path, False) - - def test_wrong_length_collection(self): - path = ("foo", "bar") - with self.assertRaises(ValueError): - self._call_fut(path, True) - - def test_wrong_length_document(self): - path = ("Kind",) - with self.assertRaises(ValueError): - self._call_fut(path, False) - - def test_wrong_type_collection(self): - path = (99, "ninety-nine", "zap") - with self.assertRaises(ValueError): - self._call_fut(path, True) - - def test_wrong_type_document(self): - path = ("Users", "Ada", "Candy", {}) - with self.assertRaises(ValueError): - self._call_fut(path, False) - - def test_success_collection(self): - path = ("Computer", "Magic", "Win") - ret_val = self._call_fut(path, True) - # NOTE: We are just checking that it didn't fail. - self.assertIsNone(ret_val) - - def test_success_document(self): - path = ("Tokenizer", "Seventeen", "Cheese", "Burger") - ret_val = self._call_fut(path, False) - # NOTE: We are just checking that it didn't fail. 
- self.assertIsNone(ret_val) - - -class Test_encode_value(unittest.TestCase): - @staticmethod - def _call_fut(value): - from google.cloud.firestore_v1beta1._helpers import encode_value - - return encode_value(value) - - def test_none(self): - from google.protobuf import struct_pb2 - - result = self._call_fut(None) - expected = _value_pb(null_value=struct_pb2.NULL_VALUE) - self.assertEqual(result, expected) - - def test_boolean(self): - result = self._call_fut(True) - expected = _value_pb(boolean_value=True) - self.assertEqual(result, expected) - - def test_integer(self): - value = 425178 - result = self._call_fut(value) - expected = _value_pb(integer_value=value) - self.assertEqual(result, expected) - - def test_float(self): - value = 123.4453125 - result = self._call_fut(value) - expected = _value_pb(double_value=value) - self.assertEqual(result, expected) - - def test_datetime_with_nanos(self): - from google.api_core.datetime_helpers import DatetimeWithNanoseconds - from google.protobuf import timestamp_pb2 - - dt_seconds = 1488768504 - dt_nanos = 458816991 - timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) - dt_val = DatetimeWithNanoseconds.from_timestamp_pb(timestamp_pb) - - result = self._call_fut(dt_val) - expected = _value_pb(timestamp_value=timestamp_pb) - self.assertEqual(result, expected) - - def test_datetime_wo_nanos(self): - from google.protobuf import timestamp_pb2 - - dt_seconds = 1488768504 - dt_nanos = 458816000 - # Make sure precision is valid in microseconds too. - self.assertEqual(dt_nanos % 1000, 0) - dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos) - - result = self._call_fut(dt_val) - timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) - expected = _value_pb(timestamp_value=timestamp_pb) - self.assertEqual(result, expected) - - def test_string(self): - value = u"\u2018left quote, right quote\u2019" - result = self._call_fut(value) - expected = _value_pb(string_value=value) - self.assertEqual(result, expected) - - def test_bytes(self): - value = b"\xe3\xf2\xff\x00" - result = self._call_fut(value) - expected = _value_pb(bytes_value=value) - self.assertEqual(result, expected) - - def test_reference_value(self): - client = _make_client() - - value = client.document("my", "friend") - result = self._call_fut(value) - expected = _value_pb(reference_value=value._document_path) - self.assertEqual(result, expected) - - def test_geo_point(self): - from google.cloud.firestore_v1beta1._helpers import GeoPoint - - value = GeoPoint(50.5, 88.75) - result = self._call_fut(value) - expected = _value_pb(geo_point_value=value.to_protobuf()) - self.assertEqual(result, expected) - - def test_array(self): - from google.cloud.firestore_v1beta1.types.document import ArrayValue - - result = self._call_fut([99, True, 118.5]) - - array_pb = ArrayValue( - values=[ - _value_pb(integer_value=99), - _value_pb(boolean_value=True), - _value_pb(double_value=118.5), - ] - ) - expected = _value_pb(array_value=array_pb) - self.assertEqual(result, expected) - - def test_map(self): - from google.cloud.firestore_v1beta1.types.document import MapValue - - result = self._call_fut({"abc": 285, "def": b"piglatin"}) - - map_pb = MapValue( - fields={ - "abc": _value_pb(integer_value=285), - "def": _value_pb(bytes_value=b"piglatin"), - } - ) - expected = _value_pb(map_value=map_pb) - self.assertEqual(result, expected) - - def test_bad_type(self): - value = object() - with self.assertRaises(TypeError): - self._call_fut(value) - - -class 
Test_encode_dict(unittest.TestCase): - @staticmethod - def _call_fut(values_dict): - from google.cloud.firestore_v1beta1._helpers import encode_dict - - return encode_dict(values_dict) - - def test_many_types(self): - from google.protobuf import struct_pb2 - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.types.document import ArrayValue - from google.cloud.firestore_v1beta1.types.document import MapValue - - dt_seconds = 1497397225 - dt_nanos = 465964000 - # Make sure precision is valid in microseconds too. - self.assertEqual(dt_nanos % 1000, 0) - dt_val = datetime.datetime.utcfromtimestamp(dt_seconds + 1e-9 * dt_nanos) - - client = _make_client() - document = client.document("most", "adjective", "thing", "here") - - values_dict = { - "foo": None, - "bar": True, - "baz": 981, - "quux": 2.875, - "quuz": dt_val, - "corge": u"\N{snowman}", - "grault": b"\xe2\x98\x83", - "wibble": document, - "garply": [u"fork", 4.0], - "waldo": {"fred": u"zap", "thud": False}, - } - encoded_dict = self._call_fut(values_dict) - expected_dict = { - "foo": _value_pb(null_value=struct_pb2.NULL_VALUE), - "bar": _value_pb(boolean_value=True), - "baz": _value_pb(integer_value=981), - "quux": _value_pb(double_value=2.875), - "quuz": _value_pb( - timestamp_value=timestamp_pb2.Timestamp( - seconds=dt_seconds, nanos=dt_nanos - ) - ), - "corge": _value_pb(string_value=u"\N{snowman}"), - "grault": _value_pb(bytes_value=b"\xe2\x98\x83"), - "wibble": _value_pb(reference_value=document._document_path), - "garply": _value_pb( - array_value=ArrayValue( - values=[ - _value_pb(string_value=u"fork"), - _value_pb(double_value=4.0), - ] - ) - ), - "waldo": _value_pb( - map_value=MapValue( - fields={ - "fred": _value_pb(string_value=u"zap"), - "thud": _value_pb(boolean_value=False), - } - ) - ), - } - self.assertEqual(encoded_dict, expected_dict) - - -class Test_reference_value_to_document(unittest.TestCase): - @staticmethod - def _call_fut(reference_value, client): - from google.cloud.firestore_v1beta1._helpers import reference_value_to_document - - return reference_value_to_document(reference_value, client) - - def test_bad_format(self): - from google.cloud.firestore_v1beta1._helpers import BAD_REFERENCE_ERROR - - reference_value = "not/the/right/format" - with self.assertRaises(ValueError) as exc_info: - self._call_fut(reference_value, None) - - err_msg = BAD_REFERENCE_ERROR.format(reference_value) - self.assertEqual(exc_info.exception.args, (err_msg,)) - - def test_same_client(self): - from google.cloud.firestore_v1beta1.document import DocumentReference - - client = _make_client() - document = client.document("that", "this") - reference_value = document._document_path - - new_document = self._call_fut(reference_value, client) - self.assertIsNot(new_document, document) - - self.assertIsInstance(new_document, DocumentReference) - self.assertIs(new_document._client, client) - self.assertEqual(new_document._path, document._path) - - def test_different_client(self): - from google.cloud.firestore_v1beta1._helpers import WRONG_APP_REFERENCE - - client1 = _make_client(project="kirk") - document = client1.document("tin", "foil") - reference_value = document._document_path - - client2 = _make_client(project="spock") - with self.assertRaises(ValueError) as exc_info: - self._call_fut(reference_value, client2) - - err_msg = WRONG_APP_REFERENCE.format(reference_value, client2._database_string) - self.assertEqual(exc_info.exception.args, (err_msg,)) - - -class Test_decode_value(unittest.TestCase): - 
@staticmethod - def _call_fut(value, client=mock.sentinel.client): - from google.cloud.firestore_v1beta1._helpers import decode_value - - return decode_value(value, client) - - def test_none(self): - from google.protobuf import struct_pb2 - - value = _value_pb(null_value=struct_pb2.NULL_VALUE) - self.assertIsNone(self._call_fut(value)) - - def test_bool(self): - value1 = _value_pb(boolean_value=True) - self.assertTrue(self._call_fut(value1)) - value2 = _value_pb(boolean_value=False) - self.assertFalse(self._call_fut(value2)) - - def test_int(self): - int_val = 29871 - value = _value_pb(integer_value=int_val) - self.assertEqual(self._call_fut(value), int_val) - - def test_float(self): - float_val = 85.9296875 - value = _value_pb(double_value=float_val) - self.assertEqual(self._call_fut(value), float_val) - - @unittest.skipIf( - (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python" - ) - def test_datetime(self): - from google.api_core.datetime_helpers import DatetimeWithNanoseconds - from google.protobuf import timestamp_pb2 - - dt_seconds = 552855006 - dt_nanos = 766961828 - - timestamp_pb = timestamp_pb2.Timestamp(seconds=dt_seconds, nanos=dt_nanos) - value = _value_pb(timestamp_value=timestamp_pb) - - expected_dt_val = DatetimeWithNanoseconds.from_timestamp_pb(timestamp_pb) - self.assertEqual(self._call_fut(value), expected_dt_val) - - def test_unicode(self): - unicode_val = u"zorgon" - value = _value_pb(string_value=unicode_val) - self.assertEqual(self._call_fut(value), unicode_val) - - def test_bytes(self): - bytes_val = b"abc\x80" - value = _value_pb(bytes_value=bytes_val) - self.assertEqual(self._call_fut(value), bytes_val) - - def test_reference(self): - from google.cloud.firestore_v1beta1.document import DocumentReference - - client = _make_client() - path = (u"then", u"there-was-one") - document = client.document(*path) - ref_string = document._document_path - value = _value_pb(reference_value=ref_string) - - result = self._call_fut(value, client) - self.assertIsInstance(result, DocumentReference) - self.assertIs(result._client, client) - self.assertEqual(result._path, path) - - def test_geo_point(self): - from google.cloud.firestore_v1beta1._helpers import GeoPoint - - geo_pt = GeoPoint(latitude=42.5, longitude=99.0625) - value = _value_pb(geo_point_value=geo_pt.to_protobuf()) - self.assertEqual(self._call_fut(value), geo_pt) - - def test_array(self): - from google.cloud.firestore_v1beta1.types import document - - sub_value1 = _value_pb(boolean_value=True) - sub_value2 = _value_pb(double_value=14.1396484375) - sub_value3 = _value_pb(bytes_value=b"\xde\xad\xbe\xef") - array_pb = document.ArrayValue(values=[sub_value1, sub_value2, sub_value3]) - value = _value_pb(array_value=array_pb) - - expected = [ - sub_value1.boolean_value, - sub_value2.double_value, - sub_value3.bytes_value, - ] - self.assertEqual(self._call_fut(value), expected) - - def test_map(self): - from google.cloud.firestore_v1beta1.types import document - - sub_value1 = _value_pb(integer_value=187680) - sub_value2 = _value_pb(string_value=u"how low can you go?") - map_pb = document.MapValue(fields={"first": sub_value1, "second": sub_value2}) - value = _value_pb(map_value=map_pb) - - expected = { - "first": sub_value1.integer_value, - "second": sub_value2.string_value, - } - self.assertEqual(self._call_fut(value), expected) - - def test_nested_map(self): - from google.cloud.firestore_v1beta1.types import document - - actual_value1 = 1009876 - actual_value2 = u"hey you guys" - actual_value3 = 
90.875 - map_pb1 = document.MapValue( - fields={ - "lowest": _value_pb(integer_value=actual_value1), - "aside": _value_pb(string_value=actual_value2), - } - ) - map_pb2 = document.MapValue( - fields={ - "middle": _value_pb(map_value=map_pb1), - "aside": _value_pb(boolean_value=True), - } - ) - map_pb3 = document.MapValue( - fields={ - "highest": _value_pb(map_value=map_pb2), - "aside": _value_pb(double_value=actual_value3), - } - ) - value = _value_pb(map_value=map_pb3) - - expected = { - "highest": { - "middle": {"lowest": actual_value1, "aside": actual_value2}, - "aside": True, - }, - "aside": actual_value3, - } - self.assertEqual(self._call_fut(value), expected) - - def test_unset_value_type(self): - with self.assertRaises(ValueError): - self._call_fut(_value_pb()) - - def test_unknown_value_type(self): - value_pb = mock.Mock() - value_pb._pb.WhichOneof.return_value = "zoob_value" - - with self.assertRaises(ValueError): - self._call_fut(value_pb) - - value_pb._pb.WhichOneof.assert_called_once_with("value_type") - - -class Test_decode_dict(unittest.TestCase): - @staticmethod - def _call_fut(value_fields, client=mock.sentinel.client): - from google.cloud.firestore_v1beta1._helpers import decode_dict - - return decode_dict(value_fields, client) - - @unittest.skipIf( - (3,) <= sys.version_info < (3, 4, 4), "known datetime bug (bpo-23517) in Python" - ) - def test_many_types(self): - from google.protobuf import struct_pb2 - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.types.document import ArrayValue - from google.cloud.firestore_v1beta1.types.document import MapValue - from google.cloud._helpers import UTC - from google.cloud.firestore_v1beta1.field_path import FieldPath - - dt_seconds = 1394037350 - dt_nanos = 667285000 - # Make sure precision is valid in microseconds too. 
- self.assertEqual(dt_nanos % 1000, 0) - dt_val = datetime.datetime.utcfromtimestamp( - dt_seconds + 1e-9 * dt_nanos - ).replace(tzinfo=UTC) - - value_fields = { - "foo": _value_pb(null_value=struct_pb2.NULL_VALUE), - "bar": _value_pb(boolean_value=True), - "baz": _value_pb(integer_value=981), - "quux": _value_pb(double_value=2.875), - "quuz": _value_pb( - timestamp_value=timestamp_pb2.Timestamp( - seconds=dt_seconds, nanos=dt_nanos - ) - ), - "corge": _value_pb(string_value=u"\N{snowman}"), - "grault": _value_pb(bytes_value=b"\xe2\x98\x83"), - "garply": _value_pb( - array_value=ArrayValue( - values=[ - _value_pb(string_value=u"fork"), - _value_pb(double_value=4.0), - ] - ) - ), - "waldo": _value_pb( - map_value=MapValue( - fields={ - "fred": _value_pb(string_value=u"zap"), - "thud": _value_pb(boolean_value=False), - } - ) - ), - FieldPath("a", "b", "c").to_api_repr(): _value_pb(boolean_value=False), - } - expected = { - "foo": None, - "bar": True, - "baz": 981, - "quux": 2.875, - "quuz": dt_val, - "corge": u"\N{snowman}", - "grault": b"\xe2\x98\x83", - "garply": [u"fork", 4.0], - "waldo": {"fred": u"zap", "thud": False}, - "a.b.c": False, - } - self.assertEqual(self._call_fut(value_fields), expected) - - -class Test_get_doc_id(unittest.TestCase): - @staticmethod - def _call_fut(document_pb, expected_prefix): - from google.cloud.firestore_v1beta1._helpers import get_doc_id - - return get_doc_id(document_pb, expected_prefix) - - @staticmethod - def _dummy_ref_string(collection_id): - from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE - - project = u"bazzzz" - return u"projects/{}/databases/{}/documents/{}".format( - project, DEFAULT_DATABASE, collection_id - ) - - def test_success(self): - from google.cloud.firestore_v1beta1.types import document - - prefix = self._dummy_ref_string("sub-collection") - actual_id = "this-is-the-one" - name = "{}/{}".format(prefix, actual_id) - - document_pb = document.Document(name=name) - document_id = self._call_fut(document_pb, prefix) - self.assertEqual(document_id, actual_id) - - def test_failure(self): - from google.cloud.firestore_v1beta1.types import document - - actual_prefix = self._dummy_ref_string("the-right-one") - wrong_prefix = self._dummy_ref_string("the-wrong-one") - name = "{}/{}".format(actual_prefix, "sorry-wont-works") - - document_pb = document.Document(name=name) - with self.assertRaises(ValueError) as exc_info: - self._call_fut(document_pb, wrong_prefix) - - exc_args = exc_info.exception.args - self.assertEqual(len(exc_args), 4) - self.assertEqual(exc_args[1], name) - self.assertEqual(exc_args[3], wrong_prefix) - - -class Test_extract_fields(unittest.TestCase): - @staticmethod - def _call_fut(document_data, prefix_path, expand_dots=False): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.extract_fields( - document_data, prefix_path, expand_dots=expand_dots - ) - - def test_w_empty_document(self): - from google.cloud.firestore_v1beta1._helpers import _EmptyDict - - document_data = {} - prefix_path = _make_field_path() - expected = [(_make_field_path(), _EmptyDict)] - - iterator = self._call_fut(document_data, prefix_path) - self.assertEqual(list(iterator), expected) - - def test_w_invalid_key_and_expand_dots(self): - document_data = {"b": 1, "a~d": 2, "c": 3} - prefix_path = _make_field_path() - - with self.assertRaises(ValueError): - list(self._call_fut(document_data, prefix_path, expand_dots=True)) - - def test_w_shallow_keys(self): - document_data = {"b": 1, "a": 2, "c": 3} - prefix_path = 
_make_field_path() - expected = [ - (_make_field_path("a"), 2), - (_make_field_path("b"), 1), - (_make_field_path("c"), 3), - ] - - iterator = self._call_fut(document_data, prefix_path) - self.assertEqual(list(iterator), expected) - - def test_w_nested(self): - from google.cloud.firestore_v1beta1._helpers import _EmptyDict - - document_data = {"b": {"a": {"d": 4, "c": 3, "g": {}}, "e": 7}, "f": 5} - prefix_path = _make_field_path() - expected = [ - (_make_field_path("b", "a", "c"), 3), - (_make_field_path("b", "a", "d"), 4), - (_make_field_path("b", "a", "g"), _EmptyDict), - (_make_field_path("b", "e"), 7), - (_make_field_path("f"), 5), - ] - - iterator = self._call_fut(document_data, prefix_path) - self.assertEqual(list(iterator), expected) - - def test_w_expand_dotted(self): - from google.cloud.firestore_v1beta1._helpers import _EmptyDict - - document_data = { - "b": {"a": {"d": 4, "c": 3, "g": {}, "k.l.m": 17}, "e": 7}, - "f": 5, - "h.i.j": 9, - } - prefix_path = _make_field_path() - expected = [ - (_make_field_path("b", "a", "c"), 3), - (_make_field_path("b", "a", "d"), 4), - (_make_field_path("b", "a", "g"), _EmptyDict), - (_make_field_path("b", "a", "k.l.m"), 17), - (_make_field_path("b", "e"), 7), - (_make_field_path("f"), 5), - (_make_field_path("h", "i", "j"), 9), - ] - - iterator = self._call_fut(document_data, prefix_path, expand_dots=True) - self.assertEqual(list(iterator), expected) - - -class Test_set_field_value(unittest.TestCase): - @staticmethod - def _call_fut(document_data, field_path, value): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.set_field_value(document_data, field_path, value) - - def test_normal_value_w_shallow(self): - document = {} - field_path = _make_field_path("a") - value = 3 - - self._call_fut(document, field_path, value) - - self.assertEqual(document, {"a": 3}) - - def test_normal_value_w_nested(self): - document = {} - field_path = _make_field_path("a", "b", "c") - value = 3 - - self._call_fut(document, field_path, value) - - self.assertEqual(document, {"a": {"b": {"c": 3}}}) - - def test_empty_dict_w_shallow(self): - from google.cloud.firestore_v1beta1._helpers import _EmptyDict - - document = {} - field_path = _make_field_path("a") - value = _EmptyDict - - self._call_fut(document, field_path, value) - - self.assertEqual(document, {"a": {}}) - - def test_empty_dict_w_nested(self): - from google.cloud.firestore_v1beta1._helpers import _EmptyDict - - document = {} - field_path = _make_field_path("a", "b", "c") - value = _EmptyDict - - self._call_fut(document, field_path, value) - - self.assertEqual(document, {"a": {"b": {"c": {}}}}) - - -class Test_get_field_value(unittest.TestCase): - @staticmethod - def _call_fut(document_data, field_path): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.get_field_value(document_data, field_path) - - def test_w_empty_path(self): - document = {} - - with self.assertRaises(ValueError): - self._call_fut(document, _make_field_path()) - - def test_miss_shallow(self): - document = {} - - with self.assertRaises(KeyError): - self._call_fut(document, _make_field_path("nonesuch")) - - def test_miss_nested(self): - document = {"a": {"b": {}}} - - with self.assertRaises(KeyError): - self._call_fut(document, _make_field_path("a", "b", "c")) - - def test_hit_shallow(self): - document = {"a": 1} - - self.assertEqual(self._call_fut(document, _make_field_path("a")), 1) - - def test_hit_nested(self): - document = {"a": {"b": {"c": 1}}} - - self.assertEqual(self._call_fut(document, 
_make_field_path("a", "b", "c")), 1) - - -class TestDocumentExtractor(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.DocumentExtractor - - def _make_one(self, document_data): - return self._get_target_class()(document_data) - - def test_ctor_w_empty_document(self): - document_data = {} - - inst = self._make_one(document_data) - - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, {}) - self.assertTrue(inst.empty_document) - self.assertFalse(inst.has_transforms) - self.assertEqual(inst.transform_paths, []) - - def test_ctor_w_delete_field_shallow(self): - from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - - document_data = {"a": DELETE_FIELD} - - inst = self._make_one(document_data) - - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, [_make_field_path("a")]) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertFalse(inst.has_transforms) - self.assertEqual(inst.transform_paths, []) - - def test_ctor_w_delete_field_nested(self): - from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - - document_data = {"a": {"b": {"c": DELETE_FIELD}}} - - inst = self._make_one(document_data) - - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, [_make_field_path("a", "b", "c")]) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertFalse(inst.has_transforms) - self.assertEqual(inst.transform_paths, []) - - def test_ctor_w_server_timestamp_shallow(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_data = {"a": SERVER_TIMESTAMP} - - inst = self._make_one(document_data) - - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, [_make_field_path("a")]) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path("a")]) - - def test_ctor_w_server_timestamp_nested(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}} - - inst = self._make_one(document_data) - - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, [_make_field_path("a", "b", "c")]) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertTrue(inst.has_transforms) - 
self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) - - def test_ctor_w_array_remove_shallow(self): - from google.cloud.firestore_v1beta1.transforms import ArrayRemove - - values = [1, 3, 5] - document_data = {"a": ArrayRemove(values)} - - inst = self._make_one(document_data) - - expected_array_removes = {_make_field_path("a"): values} - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, expected_array_removes) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path("a")]) - - def test_ctor_w_array_remove_nested(self): - from google.cloud.firestore_v1beta1.transforms import ArrayRemove - - values = [2, 4, 8] - document_data = {"a": {"b": {"c": ArrayRemove(values)}}} - - inst = self._make_one(document_data) - - expected_array_removes = {_make_field_path("a", "b", "c"): values} - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, expected_array_removes) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) - - def test_ctor_w_array_union_shallow(self): - from google.cloud.firestore_v1beta1.transforms import ArrayUnion - - values = [1, 3, 5] - document_data = {"a": ArrayUnion(values)} - - inst = self._make_one(document_data) - - expected_array_unions = {_make_field_path("a"): values} - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, expected_array_unions) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path("a")]) - - def test_ctor_w_array_union_nested(self): - from google.cloud.firestore_v1beta1.transforms import ArrayUnion - - values = [2, 4, 8] - document_data = {"a": {"b": {"c": ArrayUnion(values)}}} - - inst = self._make_one(document_data) - - expected_array_unions = {_make_field_path("a", "b", "c"): values} - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, []) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, expected_array_unions) - self.assertEqual(inst.set_fields, {}) - self.assertFalse(inst.empty_document) - self.assertTrue(inst.has_transforms) - self.assertEqual(inst.transform_paths, [_make_field_path("a", "b", "c")]) - - def test_ctor_w_empty_dict_shallow(self): - document_data = {"a": {}} - - inst = self._make_one(document_data) - - expected_field_paths = [_make_field_path("a")] - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, expected_field_paths) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - 
self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, document_data) - self.assertFalse(inst.empty_document) - self.assertFalse(inst.has_transforms) - self.assertEqual(inst.transform_paths, []) - - def test_ctor_w_empty_dict_nested(self): - document_data = {"a": {"b": {"c": {}}}} - - inst = self._make_one(document_data) - - expected_field_paths = [_make_field_path("a", "b", "c")] - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, expected_field_paths) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, document_data) - self.assertFalse(inst.empty_document) - self.assertFalse(inst.has_transforms) - self.assertEqual(inst.transform_paths, []) - - def test_ctor_w_normal_value_shallow(self): - document_data = {"b": 1, "a": 2, "c": 3} - - inst = self._make_one(document_data) - - expected_field_paths = [ - _make_field_path("a"), - _make_field_path("b"), - _make_field_path("c"), - ] - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, expected_field_paths) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, document_data) - self.assertFalse(inst.empty_document) - self.assertFalse(inst.has_transforms) - - def test_ctor_w_normal_value_nested(self): - document_data = {"b": {"a": {"d": 4, "c": 3}, "e": 7}, "f": 5} - - inst = self._make_one(document_data) - - expected_field_paths = [ - _make_field_path("b", "a", "c"), - _make_field_path("b", "a", "d"), - _make_field_path("b", "e"), - _make_field_path("f"), - ] - self.assertEqual(inst.document_data, document_data) - self.assertEqual(inst.field_paths, expected_field_paths) - self.assertEqual(inst.deleted_fields, []) - self.assertEqual(inst.server_timestamps, []) - self.assertEqual(inst.array_removes, {}) - self.assertEqual(inst.array_unions, {}) - self.assertEqual(inst.set_fields, document_data) - self.assertFalse(inst.empty_document) - self.assertFalse(inst.has_transforms) - - def test_get_update_pb_w_exists_precondition(self): - from google.cloud.firestore_v1beta1.types import write - - document_data = {} - inst = self._make_one(document_data) - document_path = ( - "projects/project-id/databases/(default)/" "documents/document-id" - ) - - update_pb = inst.get_update_pb(document_path, exists=False) - - self.assertIsInstance(update_pb, write.Write) - self.assertEqual(update_pb.update.name, document_path) - self.assertEqual(update_pb.update.fields, document_data) - self.assertTrue(update_pb._pb.HasField("current_document")) - self.assertFalse(update_pb.current_document.exists) - - def test_get_update_pb_wo_exists_precondition(self): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1._helpers import encode_dict - - document_data = {"a": 1} - inst = self._make_one(document_data) - document_path = ( - "projects/project-id/databases/(default)/" "documents/document-id" - ) - - update_pb = inst.get_update_pb(document_path) - - self.assertIsInstance(update_pb, write.Write) - self.assertEqual(update_pb.update.name, document_path) - self.assertEqual(update_pb.update.fields, encode_dict(document_data)) - self.assertFalse(update_pb._pb.HasField("current_document")) - - 
def test_get_transform_pb_w_server_timestamp_w_exists_precondition(self): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - from google.cloud.firestore_v1beta1._helpers import REQUEST_TIME_ENUM - - document_data = {"a": SERVER_TIMESTAMP} - inst = self._make_one(document_data) - document_path = ( - "projects/project-id/databases/(default)/" "documents/document-id" - ) - - transform_pb = inst.get_transform_pb(document_path, exists=False) - - self.assertIsInstance(transform_pb, write.Write) - self.assertEqual(transform_pb.transform.document, document_path) - transforms = transform_pb.transform.field_transforms - self.assertEqual(len(transforms), 1) - transform = transforms[0] - self.assertEqual(transform.field_path, "a") - self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM) - self.assertTrue(transform_pb._pb.HasField("current_document")) - self.assertFalse(transform_pb.current_document.exists) - - def test_get_transform_pb_w_server_timestamp_wo_exists_precondition(self): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - from google.cloud.firestore_v1beta1._helpers import REQUEST_TIME_ENUM - - document_data = {"a": {"b": {"c": SERVER_TIMESTAMP}}} - inst = self._make_one(document_data) - document_path = ( - "projects/project-id/databases/(default)/" "documents/document-id" - ) - - transform_pb = inst.get_transform_pb(document_path) - - self.assertIsInstance(transform_pb, write.Write) - self.assertEqual(transform_pb.transform.document, document_path) - transforms = transform_pb.transform.field_transforms - self.assertEqual(len(transforms), 1) - transform = transforms[0] - self.assertEqual(transform.field_path, "a.b.c") - self.assertEqual(transform.set_to_server_value, REQUEST_TIME_ENUM) - self.assertFalse(transform_pb._pb.HasField("current_document")) - - @staticmethod - def _array_value_to_list(array_value): - from google.cloud.firestore_v1beta1._helpers import decode_value - - return [decode_value(element, client=None) for element in array_value.values] - - def test_get_transform_pb_w_array_remove(self): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1.transforms import ArrayRemove - - values = [2, 4, 8] - document_data = {"a": {"b": {"c": ArrayRemove(values)}}} - inst = self._make_one(document_data) - document_path = ( - "projects/project-id/databases/(default)/" "documents/document-id" - ) - - transform_pb = inst.get_transform_pb(document_path) - - self.assertIsInstance(transform_pb, write.Write) - self.assertEqual(transform_pb.transform.document, document_path) - transforms = transform_pb.transform.field_transforms - self.assertEqual(len(transforms), 1) - transform = transforms[0] - self.assertEqual(transform.field_path, "a.b.c") - removed = self._array_value_to_list(transform.remove_all_from_array) - self.assertEqual(removed, values) - self.assertFalse(transform_pb._pb.HasField("current_document")) - - def test_get_transform_pb_w_array_union(self): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1.transforms import ArrayUnion - - values = [1, 3, 5] - document_data = {"a": {"b": {"c": ArrayUnion(values)}}} - inst = self._make_one(document_data) - document_path = ( - "projects/project-id/databases/(default)/" "documents/document-id" - ) - - transform_pb = inst.get_transform_pb(document_path) - - 
self.assertIsInstance(transform_pb, write.Write) - self.assertEqual(transform_pb.transform.document, document_path) - transforms = transform_pb.transform.field_transforms - self.assertEqual(len(transforms), 1) - transform = transforms[0] - self.assertEqual(transform.field_path, "a.b.c") - added = self._array_value_to_list(transform.append_missing_elements) - self.assertEqual(added, values) - self.assertFalse(transform_pb._pb.HasField("current_document")) - - -class Test_pbs_for_create(unittest.TestCase): - @staticmethod - def _call_fut(document_path, document_data): - from google.cloud.firestore_v1beta1._helpers import pbs_for_create - - return pbs_for_create(document_path, document_data) - - @staticmethod - def _make_write_w_document(document_path, **data): - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1._helpers import encode_dict - from google.cloud.firestore_v1beta1.types import common - - return write.Write( - update=document.Document(name=document_path, fields=encode_dict(data)), - current_document=common.Precondition(exists=False), - ) - - @staticmethod - def _make_write_w_transform(document_path, fields): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1 import DocumentTransform - - server_val = DocumentTransform.FieldTransform.ServerValue - transforms = [ - write.DocumentTransform.FieldTransform( - field_path=field, set_to_server_value=server_val.REQUEST_TIME - ) - for field in fields - ] - - return write.Write( - transform=write.DocumentTransform( - document=document_path, field_transforms=transforms - ) - ) - - def _helper(self, do_transform=False, empty_val=False): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - document_data = {"cheese": 1.5, "crackers": True} - - if do_transform: - document_data["butter"] = SERVER_TIMESTAMP - - if empty_val: - document_data["mustard"] = {} - - write_pbs = self._call_fut(document_path, document_data) - - if empty_val: - update_pb = self._make_write_w_document( - document_path, cheese=1.5, crackers=True, mustard={} - ) - else: - update_pb = self._make_write_w_document( - document_path, cheese=1.5, crackers=True - ) - expected_pbs = [update_pb] - - if do_transform: - expected_pbs.append( - self._make_write_w_transform(document_path, fields=["butter"]) - ) - - self.assertEqual(write_pbs, expected_pbs) - - def test_without_transform(self): - self._helper() - - def test_w_transform(self): - self._helper(do_transform=True) - - def test_w_transform_and_empty_value(self): - self._helper(do_transform=True, empty_val=True) - - -class Test_pbs_for_set_no_merge(unittest.TestCase): - @staticmethod - def _call_fut(document_path, document_data): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.pbs_for_set_no_merge(document_path, document_data) - - @staticmethod - def _make_write_w_document(document_path, **data): - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1._helpers import encode_dict - - return write.Write( - update=document.Document(name=document_path, fields=encode_dict(data)) - ) - - @staticmethod - def _make_write_w_transform(document_path, fields): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1 import DocumentTransform - - 
server_val = DocumentTransform.FieldTransform.ServerValue - transforms = [ - write.DocumentTransform.FieldTransform( - field_path=field, set_to_server_value=server_val.REQUEST_TIME - ) - for field in fields - ] - - return write.Write( - transform=write.DocumentTransform( - document=document_path, field_transforms=transforms - ) - ) - - def test_w_empty_document(self): - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - document_data = {} - - write_pbs = self._call_fut(document_path, document_data) - - update_pb = self._make_write_w_document(document_path) - expected_pbs = [update_pb] - self.assertEqual(write_pbs, expected_pbs) - - def test_w_only_server_timestamp(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - document_data = {"butter": SERVER_TIMESTAMP} - - write_pbs = self._call_fut(document_path, document_data) - - update_pb = self._make_write_w_document(document_path) - transform_pb = self._make_write_w_transform(document_path, ["butter"]) - expected_pbs = [update_pb, transform_pb] - self.assertEqual(write_pbs, expected_pbs) - - def _helper(self, do_transform=False, empty_val=False): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - document_data = {"cheese": 1.5, "crackers": True} - - if do_transform: - document_data["butter"] = SERVER_TIMESTAMP - - if empty_val: - document_data["mustard"] = {} - - write_pbs = self._call_fut(document_path, document_data) - - if empty_val: - update_pb = self._make_write_w_document( - document_path, cheese=1.5, crackers=True, mustard={} - ) - else: - update_pb = self._make_write_w_document( - document_path, cheese=1.5, crackers=True - ) - expected_pbs = [update_pb] - - if do_transform: - expected_pbs.append( - self._make_write_w_transform(document_path, fields=["butter"]) - ) - - self.assertEqual(write_pbs, expected_pbs) - - def test_defaults(self): - self._helper() - - def test_w_transform(self): - self._helper(do_transform=True) - - def test_w_transform_and_empty_value(self): - # Exercise #5944 - self._helper(do_transform=True, empty_val=True) - - -class TestDocumentExtractorForMerge(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.DocumentExtractorForMerge - - def _make_one(self, document_data): - return self._get_target_class()(document_data) - - def test_ctor_w_empty_document(self): - document_data = {} - - inst = self._make_one(document_data) - - self.assertEqual(inst.data_merge, []) - self.assertEqual(inst.transform_merge, []) - self.assertEqual(inst.merge, []) - - def test_apply_merge_all_w_empty_document(self): - document_data = {} - inst = self._make_one(document_data) - - inst.apply_merge(True) - - self.assertEqual(inst.data_merge, []) - self.assertEqual(inst.transform_merge, []) - self.assertEqual(inst.merge, []) - self.assertFalse(inst.has_updates) - - def test_apply_merge_all_w_delete(self): - from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - - document_data = {"write_me": "value", "delete_me": DELETE_FIELD} - inst = self._make_one(document_data) - - inst.apply_merge(True) - - expected_data_merge = [ - _make_field_path("delete_me"), - _make_field_path("write_me"), - ] - self.assertEqual(inst.data_merge, expected_data_merge) - self.assertEqual(inst.transform_merge, []) - self.assertEqual(inst.merge, 
expected_data_merge) - self.assertTrue(inst.has_updates) - - def test_apply_merge_all_w_server_timestamp(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_data = {"write_me": "value", "timestamp": SERVER_TIMESTAMP} - inst = self._make_one(document_data) - - inst.apply_merge(True) - - expected_data_merge = [_make_field_path("write_me")] - expected_transform_merge = [_make_field_path("timestamp")] - expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")] - self.assertEqual(inst.data_merge, expected_data_merge) - self.assertEqual(inst.transform_merge, expected_transform_merge) - self.assertEqual(inst.merge, expected_merge) - self.assertTrue(inst.has_updates) - - def test_apply_merge_list_fields_w_empty_document(self): - document_data = {} - inst = self._make_one(document_data) - - with self.assertRaises(ValueError): - inst.apply_merge(["nonesuch", "or.this"]) - - def test_apply_merge_list_fields_w_unmerged_delete(self): - from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - - document_data = { - "write_me": "value", - "delete_me": DELETE_FIELD, - "ignore_me": 123, - "unmerged_delete": DELETE_FIELD, - } - inst = self._make_one(document_data) - - with self.assertRaises(ValueError): - inst.apply_merge(["write_me", "delete_me"]) - - def test_apply_merge_list_fields_w_delete(self): - from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - - document_data = { - "write_me": "value", - "delete_me": DELETE_FIELD, - "ignore_me": 123, - } - inst = self._make_one(document_data) - - inst.apply_merge(["write_me", "delete_me"]) - - expected_set_fields = {"write_me": "value"} - expected_deleted_fields = [_make_field_path("delete_me")] - self.assertEqual(inst.set_fields, expected_set_fields) - self.assertEqual(inst.deleted_fields, expected_deleted_fields) - self.assertTrue(inst.has_updates) - - def test_apply_merge_list_fields_w_prefixes(self): - - document_data = {"a": {"b": {"c": 123}}} - inst = self._make_one(document_data) - - with self.assertRaises(ValueError): - inst.apply_merge(["a", "a.b"]) - - def test_apply_merge_list_fields_w_missing_data_string_paths(self): - - document_data = {"write_me": "value", "ignore_me": 123} - inst = self._make_one(document_data) - - with self.assertRaises(ValueError): - inst.apply_merge(["write_me", "nonesuch"]) - - def test_apply_merge_list_fields_w_non_merge_field(self): - - document_data = {"write_me": "value", "ignore_me": 123} - inst = self._make_one(document_data) - - inst.apply_merge([_make_field_path("write_me")]) - - expected_set_fields = {"write_me": "value"} - self.assertEqual(inst.set_fields, expected_set_fields) - self.assertTrue(inst.has_updates) - - def test_apply_merge_list_fields_w_server_timestamp(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_data = { - "write_me": "value", - "timestamp": SERVER_TIMESTAMP, - "ignored_stamp": SERVER_TIMESTAMP, - } - inst = self._make_one(document_data) - - inst.apply_merge([_make_field_path("write_me"), _make_field_path("timestamp")]) - - expected_data_merge = [_make_field_path("write_me")] - expected_transform_merge = [_make_field_path("timestamp")] - expected_merge = [_make_field_path("timestamp"), _make_field_path("write_me")] - self.assertEqual(inst.data_merge, expected_data_merge) - self.assertEqual(inst.transform_merge, expected_transform_merge) - self.assertEqual(inst.merge, expected_merge) - expected_server_timestamps = [_make_field_path("timestamp")] - 
self.assertEqual(inst.server_timestamps, expected_server_timestamps) - self.assertTrue(inst.has_updates) - - def test_apply_merge_list_fields_w_array_remove(self): - from google.cloud.firestore_v1beta1.transforms import ArrayRemove - - values = [2, 4, 8] - document_data = { - "write_me": "value", - "remove_me": ArrayRemove(values), - "ignored_remove_me": ArrayRemove((1, 3, 5)), - } - inst = self._make_one(document_data) - - inst.apply_merge([_make_field_path("write_me"), _make_field_path("remove_me")]) - - expected_data_merge = [_make_field_path("write_me")] - expected_transform_merge = [_make_field_path("remove_me")] - expected_merge = [_make_field_path("remove_me"), _make_field_path("write_me")] - self.assertEqual(inst.data_merge, expected_data_merge) - self.assertEqual(inst.transform_merge, expected_transform_merge) - self.assertEqual(inst.merge, expected_merge) - expected_array_removes = {_make_field_path("remove_me"): values} - self.assertEqual(inst.array_removes, expected_array_removes) - self.assertTrue(inst.has_updates) - - def test_apply_merge_list_fields_w_array_union(self): - from google.cloud.firestore_v1beta1.transforms import ArrayUnion - - values = [1, 3, 5] - document_data = { - "write_me": "value", - "union_me": ArrayUnion(values), - "ignored_union_me": ArrayUnion((2, 4, 8)), - } - inst = self._make_one(document_data) - - inst.apply_merge([_make_field_path("write_me"), _make_field_path("union_me")]) - - expected_data_merge = [_make_field_path("write_me")] - expected_transform_merge = [_make_field_path("union_me")] - expected_merge = [_make_field_path("union_me"), _make_field_path("write_me")] - self.assertEqual(inst.data_merge, expected_data_merge) - self.assertEqual(inst.transform_merge, expected_transform_merge) - self.assertEqual(inst.merge, expected_merge) - expected_array_unions = {_make_field_path("union_me"): values} - self.assertEqual(inst.array_unions, expected_array_unions) - self.assertTrue(inst.has_updates) - - -class Test_pbs_for_set_with_merge(unittest.TestCase): - @staticmethod - def _call_fut(document_path, document_data, merge): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.pbs_for_set_with_merge( - document_path, document_data, merge=merge - ) - - @staticmethod - def _make_write_w_document(document_path, **data): - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1._helpers import encode_dict - - return write.Write( - update=document.Document(name=document_path, fields=encode_dict(data)) - ) - - @staticmethod - def _make_write_w_transform(document_path, fields): - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1 import DocumentTransform - - server_val = DocumentTransform.FieldTransform.ServerValue - transforms = [ - write.DocumentTransform.FieldTransform( - field_path=field, set_to_server_value=server_val.REQUEST_TIME - ) - for field in fields - ] - - return write.Write( - transform=write.DocumentTransform( - document=document_path, field_transforms=transforms - ) - ) - - @staticmethod - def _update_document_mask(update_pb, field_paths): - from google.cloud.firestore_v1beta1.types import common - - update_pb._pb.update_mask.CopyFrom( - common.DocumentMask(field_paths=sorted(field_paths))._pb - ) - - def test_with_merge_true_wo_transform(self): - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - document_data = {"cheese": 1.5, "crackers": True} - - write_pbs = 
self._call_fut(document_path, document_data, merge=True) - - update_pb = self._make_write_w_document(document_path, **document_data) - self._update_document_mask(update_pb, field_paths=sorted(document_data)) - expected_pbs = [update_pb] - self.assertEqual(write_pbs, expected_pbs) - - def test_with_merge_field_wo_transform(self): - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - document_data = {"cheese": 1.5, "crackers": True} - - write_pbs = self._call_fut(document_path, document_data, merge=["cheese"]) - - update_pb = self._make_write_w_document( - document_path, cheese=document_data["cheese"] - ) - self._update_document_mask(update_pb, field_paths=["cheese"]) - expected_pbs = [update_pb] - self.assertEqual(write_pbs, expected_pbs) - - def test_with_merge_true_w_transform(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - update_data = {"cheese": 1.5, "crackers": True} - document_data = update_data.copy() - document_data["butter"] = SERVER_TIMESTAMP - - write_pbs = self._call_fut(document_path, document_data, merge=True) - - update_pb = self._make_write_w_document(document_path, **update_data) - self._update_document_mask(update_pb, field_paths=sorted(update_data)) - transform_pb = self._make_write_w_transform(document_path, fields=["butter"]) - expected_pbs = [update_pb, transform_pb] - self.assertEqual(write_pbs, expected_pbs) - - def test_with_merge_field_w_transform(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - update_data = {"cheese": 1.5, "crackers": True} - document_data = update_data.copy() - document_data["butter"] = SERVER_TIMESTAMP - - write_pbs = self._call_fut( - document_path, document_data, merge=["cheese", "butter"] - ) - - update_pb = self._make_write_w_document( - document_path, cheese=document_data["cheese"] - ) - self._update_document_mask(update_pb, ["cheese"]) - transform_pb = self._make_write_w_transform(document_path, fields=["butter"]) - expected_pbs = [update_pb, transform_pb] - self.assertEqual(write_pbs, expected_pbs) - - def test_with_merge_field_w_transform_masking_simple(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - update_data = {"cheese": 1.5, "crackers": True} - document_data = update_data.copy() - document_data["butter"] = {"pecan": SERVER_TIMESTAMP} - - write_pbs = self._call_fut(document_path, document_data, merge=["butter.pecan"]) - - update_pb = self._make_write_w_document(document_path) - transform_pb = self._make_write_w_transform( - document_path, fields=["butter.pecan"] - ) - expected_pbs = [update_pb, transform_pb] - self.assertEqual(write_pbs, expected_pbs) - - def test_with_merge_field_w_transform_parent(self): - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - - document_path = _make_ref_string(u"little", u"town", u"of", u"ham") - update_data = {"cheese": 1.5, "crackers": True} - document_data = update_data.copy() - document_data["butter"] = {"popcorn": "yum", "pecan": SERVER_TIMESTAMP} - - write_pbs = self._call_fut( - document_path, document_data, merge=["cheese", "butter"] - ) - - update_pb = self._make_write_w_document( - document_path, cheese=update_data["cheese"], butter={"popcorn": "yum"} - ) - self._update_document_mask(update_pb, ["cheese", "butter"]) - transform_pb = 
self._make_write_w_transform( - document_path, fields=["butter.pecan"] - ) - expected_pbs = [update_pb, transform_pb] - self.assertEqual(write_pbs, expected_pbs) - - -class TestDocumentExtractorForUpdate(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1 import _helpers - - return _helpers.DocumentExtractorForUpdate - - def _make_one(self, document_data): - return self._get_target_class()(document_data) - - def test_ctor_w_empty_document(self): - document_data = {} - - inst = self._make_one(document_data) - self.assertEqual(inst.top_level_paths, []) - - def test_ctor_w_simple_keys(self): - document_data = {"a": 1, "b": 2, "c": 3} - - expected_paths = [ - _make_field_path("a"), - _make_field_path("b"), - _make_field_path("c"), - ] - inst = self._make_one(document_data) - self.assertEqual(inst.top_level_paths, expected_paths) - - def test_ctor_w_nested_keys(self): - document_data = {"a": {"d": {"e": 1}}, "b": {"f": 7}, "c": 3} - - expected_paths = [ - _make_field_path("a"), - _make_field_path("b"), - _make_field_path("c"), - ] - inst = self._make_one(document_data) - self.assertEqual(inst.top_level_paths, expected_paths) - - def test_ctor_w_dotted_keys(self): - document_data = {"a.d.e": 1, "b.f": 7, "c": 3} - - expected_paths = [ - _make_field_path("a", "d", "e"), - _make_field_path("b", "f"), - _make_field_path("c"), - ] - inst = self._make_one(document_data) - self.assertEqual(inst.top_level_paths, expected_paths) - - def test_ctor_w_nested_dotted_keys(self): - document_data = {"a.d.e": 1, "b.f": {"h.i": 9}, "c": 3} - - expected_paths = [ - _make_field_path("a", "d", "e"), - _make_field_path("b", "f"), - _make_field_path("c"), - ] - expected_set_fields = {"a": {"d": {"e": 1}}, "b": {"f": {"h.i": 9}}, "c": 3} - inst = self._make_one(document_data) - self.assertEqual(inst.top_level_paths, expected_paths) - self.assertEqual(inst.set_fields, expected_set_fields) - - -class Test_pbs_for_update(unittest.TestCase): - @staticmethod - def _call_fut(document_path, field_updates, option): - from google.cloud.firestore_v1beta1._helpers import pbs_for_update - - return pbs_for_update(document_path, field_updates, option) - - def _helper(self, option=None, do_transform=False, **write_kwargs): - from google.cloud.firestore_v1beta1 import _helpers - from google.cloud.firestore_v1beta1.field_path import FieldPath - from google.cloud.firestore_v1beta1.transforms import SERVER_TIMESTAMP - from google.cloud.firestore_v1beta1 import DocumentTransform - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - - document_path = _make_ref_string(u"toy", u"car", u"onion", u"garlic") - field_path1 = "bitez.yum" - value = b"\x00\x01" - field_path2 = "blog.internet" - - field_updates = {field_path1: value} - if do_transform: - field_updates[field_path2] = SERVER_TIMESTAMP - - write_pbs = self._call_fut(document_path, field_updates, option) - - map_pb = document.MapValue(fields={"yum": _value_pb(bytes_value=value)}) - - field_paths = [field_path1] - - expected_update_pb = write.Write( - update=document.Document( - name=document_path, fields={"bitez": _value_pb(map_value=map_pb)} - ), - update_mask=common.DocumentMask(field_paths=field_paths), - **write_kwargs - ) - if isinstance(option, _helpers.ExistsOption): - precondition = common.Precondition(exists=False) - expected_update_pb._pb.current_document.CopyFrom(precondition._pb) - expected_pbs = 
[expected_update_pb] - if do_transform: - transform_paths = FieldPath.from_string(field_path2) - server_val = DocumentTransform.FieldTransform.ServerValue - expected_transform_pb = write.Write( - transform=write.DocumentTransform( - document=document_path, - field_transforms=[ - write.DocumentTransform.FieldTransform( - field_path=transform_paths.to_api_repr(), - set_to_server_value=server_val.REQUEST_TIME, - ) - ], - ) - ) - expected_pbs.append(expected_transform_pb) - self.assertEqual(write_pbs, expected_pbs) - - def test_without_option(self): - from google.cloud.firestore_v1beta1.types import common - - precondition = common.Precondition(exists=True) - self._helper(current_document=precondition) - - def test_with_exists_option(self): - from google.cloud.firestore_v1beta1.client import _helpers - - option = _helpers.ExistsOption(False) - self._helper(option=option) - - def test_update_and_transform(self): - from google.cloud.firestore_v1beta1.types import common - - precondition = common.Precondition(exists=True) - self._helper(current_document=precondition, do_transform=True) - - -class Test_pb_for_delete(unittest.TestCase): - @staticmethod - def _call_fut(document_path, option): - from google.cloud.firestore_v1beta1._helpers import pb_for_delete - - return pb_for_delete(document_path, option) - - def _helper(self, option=None, **write_kwargs): - from google.cloud.firestore_v1beta1.types import write - - document_path = _make_ref_string(u"chicken", u"philly", u"one", u"two") - write_pb = self._call_fut(document_path, option) - - expected_pb = write.Write(delete=document_path, **write_kwargs) - self.assertEqual(write_pb, expected_pb) - - def test_without_option(self): - self._helper() - - def test_with_option(self): - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1 import _helpers - - update_time = timestamp_pb2.Timestamp(seconds=1309700594, nanos=822211297) - option = _helpers.LastUpdateOption(update_time) - precondition = common.Precondition(update_time=update_time) - self._helper(option=option, current_document=precondition) - - -class Test_get_transaction_id(unittest.TestCase): - @staticmethod - def _call_fut(transaction, **kwargs): - from google.cloud.firestore_v1beta1._helpers import get_transaction_id - - return get_transaction_id(transaction, **kwargs) - - def test_no_transaction(self): - ret_val = self._call_fut(None) - self.assertIsNone(ret_val) - - def test_invalid_transaction(self): - from google.cloud.firestore_v1beta1.transaction import Transaction - - transaction = Transaction(mock.sentinel.client) - self.assertFalse(transaction.in_progress) - with self.assertRaises(ValueError): - self._call_fut(transaction) - - def test_after_writes_not_allowed(self): - from google.cloud.firestore_v1beta1._helpers import ReadAfterWriteError - from google.cloud.firestore_v1beta1.transaction import Transaction - - transaction = Transaction(mock.sentinel.client) - transaction._id = b"under-hook" - transaction._write_pbs.append(mock.sentinel.write) - - with self.assertRaises(ReadAfterWriteError): - self._call_fut(transaction) - - def test_after_writes_allowed(self): - from google.cloud.firestore_v1beta1.transaction import Transaction - - transaction = Transaction(mock.sentinel.client) - txn_id = b"we-are-0fine" - transaction._id = txn_id - transaction._write_pbs.append(mock.sentinel.write) - - ret_val = self._call_fut(transaction, read_operation=False) - self.assertEqual(ret_val, txn_id) - - def 
test_good_transaction(self): - from google.cloud.firestore_v1beta1.transaction import Transaction - - transaction = Transaction(mock.sentinel.client) - txn_id = b"doubt-it" - transaction._id = txn_id - self.assertTrue(transaction.in_progress) - - self.assertEqual(self._call_fut(transaction), txn_id) - - -class Test_metadata_with_prefix(unittest.TestCase): - @staticmethod - def _call_fut(database_string): - from google.cloud.firestore_v1beta1._helpers import metadata_with_prefix - - return metadata_with_prefix(database_string) - - def test_it(self): - database_string = u"projects/prahj/databases/dee-bee" - metadata = self._call_fut(database_string) - - self.assertEqual(metadata, [("google-cloud-resource-prefix", database_string)]) - - -class TestWriteOption(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1._helpers import WriteOption - - return WriteOption - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_modify_write(self): - option = self._make_one() - with self.assertRaises(NotImplementedError): - option.modify_write(None) - - -class TestLastUpdateOption(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1._helpers import LastUpdateOption - - return LastUpdateOption - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor(self): - option = self._make_one(mock.sentinel.timestamp) - self.assertIs(option._last_update_time, mock.sentinel.timestamp) - - def test___eq___different_type(self): - option = self._make_one(mock.sentinel.timestamp) - other = object() - self.assertFalse(option == other) - - def test___eq___different_timestamp(self): - option = self._make_one(mock.sentinel.timestamp) - other = self._make_one(mock.sentinel.other_timestamp) - self.assertFalse(option == other) - - def test___eq___same_timestamp(self): - option = self._make_one(mock.sentinel.timestamp) - other = self._make_one(mock.sentinel.timestamp) - self.assertTrue(option == other) - - def test_modify_write_update_time(self): - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import write - - timestamp_pb = timestamp_pb2.Timestamp(seconds=683893592, nanos=229362000) - option = self._make_one(timestamp_pb) - write_pb = write.Write() - ret_val = option.modify_write(write_pb) - - self.assertIsNone(ret_val) - expected_doc = common.Precondition(update_time=timestamp_pb) - self.assertEqual(write_pb.current_document, expected_doc) - - -class TestExistsOption(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1._helpers import ExistsOption - - return ExistsOption - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor(self): - option = self._make_one(mock.sentinel.totes_bool) - self.assertIs(option._exists, mock.sentinel.totes_bool) - - def test___eq___different_type(self): - option = self._make_one(mock.sentinel.timestamp) - other = object() - self.assertFalse(option == other) - - def test___eq___different_exists(self): - option = self._make_one(True) - other = self._make_one(False) - self.assertFalse(option == other) - - def test___eq___same_exists(self): - option = self._make_one(True) - other = self._make_one(True) - self.assertTrue(option == other) - - def 
test_modify_write(self): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import write - - for exists in (True, False): - option = self._make_one(exists) - write_pb = write.Write() - ret_val = option.modify_write(write_pb) - - self.assertIsNone(ret_val) - expected_doc = common.Precondition(exists=exists) - self.assertEqual(write_pb.current_document, expected_doc) - - -def _value_pb(**kwargs): - from google.cloud.firestore_v1beta1.types.document import Value - - return Value(**kwargs) - - -def _make_ref_string(project, database, *path): - from google.cloud.firestore_v1beta1 import _helpers - - doc_rel_path = _helpers.DOCUMENT_PATH_DELIMITER.join(path) - return u"projects/{}/databases/{}/documents/{}".format( - project, database, doc_rel_path - ) - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_client(project="quark"): - from google.cloud.firestore_v1beta1.client import Client - - credentials = _make_credentials() - - with pytest.deprecated_call(): - return Client(project=project, credentials=credentials) - - -def _make_field_path(*fields): - from google.cloud.firestore_v1beta1 import field_path - - return field_path.FieldPath(*fields) diff --git a/tests/unit/v1beta1/test_batch.py b/tests/unit/v1beta1/test_batch.py deleted file mode 100644 index aa64de733..000000000 --- a/tests/unit/v1beta1/test_batch.py +++ /dev/null @@ -1,280 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock -import pytest - - -class TestWriteBatch(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.batch import WriteBatch - - return WriteBatch - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor(self): - batch = self._make_one(mock.sentinel.client) - self.assertIs(batch._client, mock.sentinel.client) - self.assertEqual(batch._write_pbs, []) - self.assertIsNone(batch.write_results) - self.assertIsNone(batch.commit_time) - - def test__add_write_pbs(self): - batch = self._make_one(mock.sentinel.client) - self.assertEqual(batch._write_pbs, []) - batch._add_write_pbs([mock.sentinel.write1, mock.sentinel.write2]) - self.assertEqual(batch._write_pbs, [mock.sentinel.write1, mock.sentinel.write2]) - - def test_create(self): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - - client = _make_client() - batch = self._make_one(client) - self.assertEqual(batch._write_pbs, []) - - reference = client.document("this", "one") - document_data = {"a": 10, "b": 2.5} - ret_val = batch.create(reference, document_data) - self.assertIsNone(ret_val) - new_write_pb = write.Write( - update=document.Document( - name=reference._document_path, - fields={ - "a": _value_pb(integer_value=document_data["a"]), - "b": _value_pb(double_value=document_data["b"]), - }, - ), - current_document=common.Precondition(exists=False), - ) - self.assertEqual(batch._write_pbs, [new_write_pb]) - - def test_set(self): - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - - client = _make_client() - batch = self._make_one(client) - self.assertEqual(batch._write_pbs, []) - - reference = client.document("another", "one") - field = "zapzap" - value = u"meadows and flowers" - document_data = {field: value} - ret_val = batch.set(reference, document_data) - self.assertIsNone(ret_val) - new_write_pb = write.Write( - update=document.Document( - name=reference._document_path, - fields={field: _value_pb(string_value=value)}, - ) - ) - self.assertEqual(batch._write_pbs, [new_write_pb]) - - def test_set_merge(self): - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - - client = _make_client() - batch = self._make_one(client) - self.assertEqual(batch._write_pbs, []) - - reference = client.document("another", "one") - field = "zapzap" - value = u"meadows and flowers" - document_data = {field: value} - ret_val = batch.set(reference, document_data, merge=True) - self.assertIsNone(ret_val) - new_write_pb = write.Write( - update=document.Document( - name=reference._document_path, - fields={field: _value_pb(string_value=value)}, - ), - update_mask={"field_paths": [field]}, - ) - self.assertEqual(batch._write_pbs, [new_write_pb]) - - def test_update(self): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - - client = _make_client() - batch = self._make_one(client) - self.assertEqual(batch._write_pbs, []) - - reference = client.document("cats", "cradle") - field_path = "head.foot" - value = u"knees toes shoulders" - field_updates = {field_path: value} - - ret_val = batch.update(reference, field_updates) - 
self.assertIsNone(ret_val) - - map_pb = document.MapValue(fields={"foot": _value_pb(string_value=value)}) - new_write_pb = write.Write( - update=document.Document( - name=reference._document_path, - fields={"head": _value_pb(map_value=map_pb)}, - ), - update_mask=common.DocumentMask(field_paths=[field_path]), - current_document=common.Precondition(exists=True), - ) - self.assertEqual(batch._write_pbs, [new_write_pb]) - - def test_delete(self): - from google.cloud.firestore_v1beta1.types import write - - client = _make_client() - batch = self._make_one(client) - self.assertEqual(batch._write_pbs, []) - - reference = client.document("early", "mornin", "dawn", "now") - ret_val = batch.delete(reference) - self.assertIsNone(ret_val) - new_write_pb = write.Write(delete=reference._document_path) - self.assertEqual(batch._write_pbs, [new_write_pb]) - - def test_commit(self): - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.types import firestore - from google.cloud.firestore_v1beta1.types import write - - # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.Mock(spec=["commit"]) - timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798) - commit_response = firestore.CommitResponse( - write_results=[write.WriteResult(), write.WriteResult()], - commit_time=timestamp, - ) - firestore_api.commit.return_value = commit_response - - # Attach the fake GAPIC to a real client. - client = _make_client("grand") - client._firestore_api_internal = firestore_api - - # Actually make a batch with some mutations and call commit(). - batch = self._make_one(client) - document1 = client.document("a", "b") - batch.create(document1, {"ten": 10, "buck": u"ets"}) - document2 = client.document("c", "d", "e", "f") - batch.delete(document2) - write_pbs = batch._write_pbs[::] - - write_results = batch.commit() - self.assertEqual(write_results, list(commit_response.write_results)) - self.assertEqual(batch.write_results, write_results) - # TODO(microgen): v2: commit time is already a datetime, though not with nano - # self.assertEqual(batch.commit_time, timestamp) - # Make sure batch has no more "changes". - self.assertEqual(batch._write_pbs, []) - - # Verify the mocks. 
- firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": write_pbs, - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_as_context_mgr_wo_error(self): - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.types import firestore - from google.cloud.firestore_v1beta1.types import write - - firestore_api = mock.Mock(spec=["commit"]) - timestamp = timestamp_pb2.Timestamp(seconds=1234567, nanos=123456798) - commit_response = firestore.CommitResponse( - write_results=[write.WriteResult(), write.WriteResult()], - commit_time=timestamp, - ) - firestore_api.commit.return_value = commit_response - client = _make_client() - client._firestore_api_internal = firestore_api - batch = self._make_one(client) - document1 = client.document("a", "b") - document2 = client.document("c", "d", "e", "f") - - with batch as ctx_mgr: - self.assertIs(ctx_mgr, batch) - ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"}) - ctx_mgr.delete(document2) - write_pbs = batch._write_pbs[::] - - self.assertEqual(batch.write_results, list(commit_response.write_results)) - # TODO(microgen): v2: commit time is already a datetime, though not with nano - # self.assertEqual(batch.commit_time, timestamp) - # Make sure batch has no more "changes". - self.assertEqual(batch._write_pbs, []) - - # Verify the mocks. - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": write_pbs, - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_as_context_mgr_w_error(self): - firestore_api = mock.Mock(spec=["commit"]) - client = _make_client() - client._firestore_api_internal = firestore_api - batch = self._make_one(client) - document1 = client.document("a", "b") - document2 = client.document("c", "d", "e", "f") - - with self.assertRaises(RuntimeError): - with batch as ctx_mgr: - ctx_mgr.create(document1, {"ten": 10, "buck": u"ets"}) - ctx_mgr.delete(document2) - raise RuntimeError("testing") - - self.assertIsNone(batch.write_results) - self.assertIsNone(batch.commit_time) - # batch still has its changes - self.assertEqual(len(batch._write_pbs), 2) - - firestore_api.commit.assert_not_called() - - -def _value_pb(**kwargs): - from google.cloud.firestore_v1beta1.types.document import Value - - return Value(**kwargs) - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_client(project="seventy-nine"): - from google.cloud.firestore_v1beta1.client import Client - - credentials = _make_credentials() - - with pytest.deprecated_call(): - return Client(project=project, credentials=credentials) diff --git a/tests/unit/v1beta1/test_client.py b/tests/unit/v1beta1/test_client.py deleted file mode 100644 index 8f753b760..000000000 --- a/tests/unit/v1beta1/test_client.py +++ /dev/null @@ -1,677 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import datetime -import types -import unittest - -import mock -import pytest - - -class TestClient(unittest.TestCase): - - PROJECT = "my-prahjekt" - - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.client import Client - - return Client - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def _make_default_one(self): - credentials = _make_credentials() - return self._make_one(project=self.PROJECT, credentials=credentials) - - def test_constructor(self): - from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE - - credentials = _make_credentials() - - with pytest.deprecated_call(): - client = self._make_one(project=self.PROJECT, credentials=credentials) - - self.assertEqual(client.project, self.PROJECT) - self.assertEqual(client._credentials, credentials) - self.assertEqual(client._database, DEFAULT_DATABASE) - - def test_constructor_explicit(self): - credentials = _make_credentials() - database = "now-db" - - with pytest.deprecated_call(): - client = self._make_one( - project=self.PROJECT, credentials=credentials, database=database - ) - - self.assertEqual(client.project, self.PROJECT) - self.assertEqual(client._credentials, credentials) - self.assertEqual(client._database, database) - - @mock.patch( - "google.cloud.firestore_v1beta1.services.firestore.client." "FirestoreClient", - autospec=True, - return_value=mock.sentinel.firestore_api, - ) - def test__firestore_api_property(self, mock_client): - mock_client.DEFAULT_ENDPOINT = "endpoint" - - with pytest.deprecated_call(): - client = self._make_default_one() - - self.assertIsNone(client._firestore_api_internal) - firestore_api = client._firestore_api - self.assertIs(firestore_api, mock_client.return_value) - self.assertIs(firestore_api, client._firestore_api_internal) - mock_client.assert_called_once_with(transport=client._transport) - - # Call again to show that it is cached, but call count is still 1. - self.assertIs(client._firestore_api, mock_client.return_value) - self.assertEqual(mock_client.call_count, 1) - - def test___database_string_property(self): - credentials = _make_credentials() - database = "cheeeeez" - - with pytest.deprecated_call(): - client = self._make_one( - project=self.PROJECT, credentials=credentials, database=database - ) - - self.assertIsNone(client._database_string_internal) - database_string = client._database_string - expected = "projects/{}/databases/{}".format(client.project, client._database) - self.assertEqual(database_string, expected) - self.assertIs(database_string, client._database_string_internal) - - # Swap it out with a unique value to verify it is cached. 
- client._database_string_internal = mock.sentinel.cached - self.assertIs(client._database_string, mock.sentinel.cached) - - def test___rpc_metadata_property(self): - credentials = _make_credentials() - database = "quanta" - - with pytest.deprecated_call(): - client = self._make_one( - project=self.PROJECT, credentials=credentials, database=database - ) - - self.assertEqual( - client._rpc_metadata, - [("google-cloud-resource-prefix", client._database_string)], - ) - - def test_collection_factory(self): - from google.cloud.firestore_v1beta1.collection import CollectionReference - - collection_id = "users" - - with pytest.deprecated_call(): - client = self._make_default_one() - - collection = client.collection(collection_id) - - self.assertEqual(collection._path, (collection_id,)) - self.assertIs(collection._client, client) - self.assertIsInstance(collection, CollectionReference) - - def test_collection_factory_nested(self): - from google.cloud.firestore_v1beta1.collection import CollectionReference - - with pytest.deprecated_call(): - client = self._make_default_one() - - parts = ("users", "alovelace", "beep") - collection_path = "/".join(parts) - collection1 = client.collection(collection_path) - - self.assertEqual(collection1._path, parts) - self.assertIs(collection1._client, client) - self.assertIsInstance(collection1, CollectionReference) - - # Make sure using segments gives the same result. - collection2 = client.collection(*parts) - self.assertEqual(collection2._path, parts) - self.assertIs(collection2._client, client) - self.assertIsInstance(collection2, CollectionReference) - - def test_document_factory(self): - from google.cloud.firestore_v1beta1.document import DocumentReference - - parts = ("rooms", "roomA") - - with pytest.deprecated_call(): - client = self._make_default_one() - - doc_path = "/".join(parts) - document1 = client.document(doc_path) - - self.assertEqual(document1._path, parts) - self.assertIs(document1._client, client) - self.assertIsInstance(document1, DocumentReference) - - # Make sure using segments gives the same result. - document2 = client.document(*parts) - self.assertEqual(document2._path, parts) - self.assertIs(document2._client, client) - self.assertIsInstance(document2, DocumentReference) - - def test_document_factory_nested(self): - from google.cloud.firestore_v1beta1.document import DocumentReference - - with pytest.deprecated_call(): - client = self._make_default_one() - - parts = ("rooms", "roomA", "shoes", "dressy") - doc_path = "/".join(parts) - document1 = client.document(doc_path) - - self.assertEqual(document1._path, parts) - self.assertIs(document1._client, client) - self.assertIsInstance(document1, DocumentReference) - - # Make sure using segments gives the same result. 
- document2 = client.document(*parts) - self.assertEqual(document2._path, parts) - self.assertIs(document2._client, client) - self.assertIsInstance(document2, DocumentReference) - - def test_field_path(self): - klass = self._get_target_class() - self.assertEqual(klass.field_path("a", "b", "c"), "a.b.c") - - def test_write_option_last_update(self): - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1._helpers import LastUpdateOption - - timestamp = timestamp_pb2.Timestamp(seconds=1299767599, nanos=811111097) - - klass = self._get_target_class() - option = klass.write_option(last_update_time=timestamp) - self.assertIsInstance(option, LastUpdateOption) - self.assertEqual(option._last_update_time, timestamp) - - def test_write_option_exists(self): - from google.cloud.firestore_v1beta1._helpers import ExistsOption - - klass = self._get_target_class() - - option1 = klass.write_option(exists=False) - self.assertIsInstance(option1, ExistsOption) - self.assertFalse(option1._exists) - - option2 = klass.write_option(exists=True) - self.assertIsInstance(option2, ExistsOption) - self.assertTrue(option2._exists) - - def test_write_open_neither_arg(self): - from google.cloud.firestore_v1beta1.client import _BAD_OPTION_ERR - - klass = self._get_target_class() - with self.assertRaises(TypeError) as exc_info: - klass.write_option() - - self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,)) - - def test_write_multiple_args(self): - from google.cloud.firestore_v1beta1.client import _BAD_OPTION_ERR - - klass = self._get_target_class() - with self.assertRaises(TypeError) as exc_info: - klass.write_option(exists=False, last_update_time=mock.sentinel.timestamp) - - self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR,)) - - def test_write_bad_arg(self): - from google.cloud.firestore_v1beta1.client import _BAD_OPTION_ERR - - klass = self._get_target_class() - with self.assertRaises(TypeError) as exc_info: - klass.write_option(spinach="popeye") - - extra = "{!r} was provided".format("spinach") - self.assertEqual(exc_info.exception.args, (_BAD_OPTION_ERR, extra)) - - def test_collections(self): - from google.api_core.page_iterator import Iterator - from google.api_core.page_iterator import Page - from google.cloud.firestore_v1beta1.collection import CollectionReference - - collection_ids = ["users", "projects"] - - with pytest.deprecated_call(): - client = self._make_default_one() - - firestore_api = mock.Mock(spec=["list_collection_ids"]) - client._firestore_api_internal = firestore_api - - class _Iterator(Iterator): - def __init__(self, pages): - super(_Iterator, self).__init__(client=None) - self._pages = pages - - def _next_page(self): - if self._pages: - page, self._pages = self._pages[0], self._pages[1:] - return Page(self, page, self.item_to_value) - - iterator = _Iterator(pages=[collection_ids]) - firestore_api.list_collection_ids.return_value = iterator - - collections = list(client.collections()) - - self.assertEqual(len(collections), len(collection_ids)) - for collection, collection_id in zip(collections, collection_ids): - self.assertIsInstance(collection, CollectionReference) - self.assertEqual(collection.parent, None) - self.assertEqual(collection.id, collection_id) - - firestore_api.list_collection_ids.assert_called_once_with( - request={"parent": client._database_string}, metadata=client._rpc_metadata - ) - - def _get_all_helper(self, client, references, document_pbs, **kwargs): - # Create a minimal fake GAPIC with a dummy response. 
- firestore_api = mock.Mock(spec=["batch_get_documents"]) - response_iterator = iter(document_pbs) - firestore_api.batch_get_documents.return_value = response_iterator - - # Attach the fake GAPIC to a real client. - client._firestore_api_internal = firestore_api - - # Actually call get_all(). - snapshots = client.get_all(references, **kwargs) - self.assertIsInstance(snapshots, types.GeneratorType) - - return list(snapshots) - - def _info_for_get_all(self, data1, data2): - - with pytest.deprecated_call(): - client = self._make_default_one() - - document1 = client.document("pineapple", "lamp1") - document2 = client.document("pineapple", "lamp2") - - # Make response protobufs. - document_pb1, read_time = _doc_get_info(document1._document_path, data1) - response1 = _make_batch_response(found=document_pb1, read_time=read_time) - - document, read_time = _doc_get_info(document2._document_path, data2) - response2 = _make_batch_response(found=document, read_time=read_time) - - return client, document1, document2, response1, response2 - - def test_get_all(self): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.document import DocumentSnapshot - - data1 = {"a": u"cheese"} - data2 = {"b": True, "c": 18} - info = self._info_for_get_all(data1, data2) - client, document1, document2, response1, response2 = info - - # Exercise the mocked ``batch_get_documents``. - field_paths = ["a", "b"] - snapshots = self._get_all_helper( - client, - [document1, document2], - [response1, response2], - field_paths=field_paths, - ) - self.assertEqual(len(snapshots), 2) - - snapshot1 = snapshots[0] - self.assertIsInstance(snapshot1, DocumentSnapshot) - self.assertIs(snapshot1._reference, document1) - self.assertEqual(snapshot1._data, data1) - - snapshot2 = snapshots[1] - self.assertIsInstance(snapshot2, DocumentSnapshot) - self.assertIs(snapshot2._reference, document2) - self.assertEqual(snapshot2._data, data2) - - # Verify the call to the mock. - doc_paths = [document1._document_path, document2._document_path] - mask = common.DocumentMask(field_paths=field_paths) - client._firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": doc_paths, - "mask": mask, - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_get_all_with_transaction(self): - from google.cloud.firestore_v1beta1.document import DocumentSnapshot - - data = {"so-much": 484} - info = self._info_for_get_all(data, {}) - client, document, _, response, _ = info - transaction = client.transaction() - txn_id = b"the-man-is-non-stop" - transaction._id = txn_id - - # Exercise the mocked ``batch_get_documents``. - snapshots = self._get_all_helper( - client, [document], [response], transaction=transaction - ) - self.assertEqual(len(snapshots), 1) - - snapshot = snapshots[0] - self.assertIsInstance(snapshot, DocumentSnapshot) - self.assertIs(snapshot._reference, document) - self.assertEqual(snapshot._data, data) - - # Verify the call to the mock. 
- doc_paths = [document._document_path] - client._firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": doc_paths, - "mask": None, - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - - def test_get_all_unknown_result(self): - from google.cloud.firestore_v1beta1.client import _BAD_DOC_TEMPLATE - - info = self._info_for_get_all({"z": 28.5}, {}) - client, document, _, _, response = info - - # Exercise the mocked ``batch_get_documents``. - with self.assertRaises(ValueError) as exc_info: - self._get_all_helper(client, [document], [response]) - - err_msg = _BAD_DOC_TEMPLATE.format(response.found.name) - self.assertEqual(exc_info.exception.args, (err_msg,)) - - # Verify the call to the mock. - doc_paths = [document._document_path] - client._firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": doc_paths, - "mask": None, - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_get_all_wrong_order(self): - from google.cloud.firestore_v1beta1.document import DocumentSnapshot - - data1 = {"up": 10} - data2 = {"down": -10} - info = self._info_for_get_all(data1, data2) - client, document1, document2, response1, response2 = info - document3 = client.document("pineapple", "lamp3") - response3 = _make_batch_response(missing=document3._document_path) - - # Exercise the mocked ``batch_get_documents``. - snapshots = self._get_all_helper( - client, [document1, document2, document3], [response2, response1, response3] - ) - - self.assertEqual(len(snapshots), 3) - - snapshot1 = snapshots[0] - self.assertIsInstance(snapshot1, DocumentSnapshot) - self.assertIs(snapshot1._reference, document2) - self.assertEqual(snapshot1._data, data2) - - snapshot2 = snapshots[1] - self.assertIsInstance(snapshot2, DocumentSnapshot) - self.assertIs(snapshot2._reference, document1) - self.assertEqual(snapshot2._data, data1) - - self.assertFalse(snapshots[2].exists) - - # Verify the call to the mock. 
- doc_paths = [ - document1._document_path, - document2._document_path, - document3._document_path, - ] - client._firestore_api.batch_get_documents.assert_called_once_with( - request={ - "database": client._database_string, - "documents": doc_paths, - "mask": None, - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_batch(self): - from google.cloud.firestore_v1beta1.batch import WriteBatch - - with pytest.deprecated_call(): - client = self._make_default_one() - - batch = client.batch() - self.assertIsInstance(batch, WriteBatch) - self.assertIs(batch._client, client) - self.assertEqual(batch._write_pbs, []) - - def test_transaction(self): - from google.cloud.firestore_v1beta1.transaction import Transaction - - with pytest.deprecated_call(): - client = self._make_default_one() - - transaction = client.transaction(max_attempts=3, read_only=True) - self.assertIsInstance(transaction, Transaction) - self.assertEqual(transaction._write_pbs, []) - self.assertEqual(transaction._max_attempts, 3) - self.assertTrue(transaction._read_only) - self.assertIsNone(transaction._id) - - -class Test__reference_info(unittest.TestCase): - @staticmethod - def _call_fut(references): - from google.cloud.firestore_v1beta1.client import _reference_info - - return _reference_info(references) - - def test_it(self): - from google.cloud.firestore_v1beta1.client import Client - - credentials = _make_credentials() - - with pytest.deprecated_call(): - client = Client(project="hi-projject", credentials=credentials) - - reference1 = client.document("a", "b") - reference2 = client.document("a", "b", "c", "d") - reference3 = client.document("a", "b") - reference4 = client.document("f", "g") - - doc_path1 = reference1._document_path - doc_path2 = reference2._document_path - doc_path3 = reference3._document_path - doc_path4 = reference4._document_path - self.assertEqual(doc_path1, doc_path3) - - document_paths, reference_map = self._call_fut( - [reference1, reference2, reference3, reference4] - ) - self.assertEqual(document_paths, [doc_path1, doc_path2, doc_path3, doc_path4]) - # reference3 over-rides reference1. 
- expected_map = { - doc_path2: reference2, - doc_path3: reference3, - doc_path4: reference4, - } - self.assertEqual(reference_map, expected_map) - - -class Test__get_reference(unittest.TestCase): - @staticmethod - def _call_fut(document_path, reference_map): - from google.cloud.firestore_v1beta1.client import _get_reference - - return _get_reference(document_path, reference_map) - - def test_success(self): - doc_path = "a/b/c" - reference_map = {doc_path: mock.sentinel.reference} - self.assertIs(self._call_fut(doc_path, reference_map), mock.sentinel.reference) - - def test_failure(self): - from google.cloud.firestore_v1beta1.client import _BAD_DOC_TEMPLATE - - doc_path = "1/888/call-now" - with self.assertRaises(ValueError) as exc_info: - self._call_fut(doc_path, {}) - - err_msg = _BAD_DOC_TEMPLATE.format(doc_path) - self.assertEqual(exc_info.exception.args, (err_msg,)) - - -class Test__parse_batch_get(unittest.TestCase): - @staticmethod - def _call_fut(get_doc_response, reference_map, client=mock.sentinel.client): - from google.cloud.firestore_v1beta1.client import _parse_batch_get - - return _parse_batch_get(get_doc_response, reference_map, client) - - @staticmethod - def _dummy_ref_string(): - from google.cloud.firestore_v1beta1.client import DEFAULT_DATABASE - - project = u"bazzzz" - collection_id = u"fizz" - document_id = u"buzz" - return u"projects/{}/databases/{}/documents/{}/{}".format( - project, DEFAULT_DATABASE, collection_id, document_id - ) - - def test_found(self): - from google.cloud.firestore_v1beta1.types import document - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.firestore_v1beta1.document import DocumentSnapshot - - now = datetime.datetime.utcnow() - read_time = _datetime_to_pb_timestamp(now) - delta = datetime.timedelta(seconds=100) - update_time = _datetime_to_pb_timestamp(now - delta) - create_time = _datetime_to_pb_timestamp(now - 2 * delta) - - ref_string = self._dummy_ref_string() - document_pb = document.Document( - name=ref_string, - fields={ - "foo": document.Value(double_value=1.5), - "bar": document.Value(string_value=u"skillz"), - }, - create_time=create_time, - update_time=update_time, - ) - response_pb = _make_batch_response(found=document_pb, read_time=read_time) - - reference_map = {ref_string: mock.sentinel.reference} - snapshot = self._call_fut(response_pb, reference_map) - self.assertIsInstance(snapshot, DocumentSnapshot) - self.assertIs(snapshot._reference, mock.sentinel.reference) - self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"}) - self.assertTrue(snapshot._exists) - # TODO(microgen): v2: datetimewithnanos - # self.assertEqual(snapshot.read_time, read_time) - # self.assertEqual(snapshot.create_time, create_time) - # self.assertEqual(snapshot.update_time, update_time) - - def test_missing(self): - ref_string = self._dummy_ref_string() - response_pb = _make_batch_response(missing=ref_string) - - snapshot = self._call_fut(response_pb, {}) - self.assertFalse(snapshot.exists) - - def test_unset_result_type(self): - response_pb = _make_batch_response() - with self.assertRaises(ValueError): - self._call_fut(response_pb, {}) - - def test_unknown_result_type(self): - response_pb = mock.Mock() - response_pb._pb.mock_add_spec(spec=["WhichOneof"]) - response_pb._pb.WhichOneof.return_value = "zoob_value" - - with self.assertRaises(ValueError): - self._call_fut(response_pb, {}) - - response_pb._pb.WhichOneof.assert_called_once_with("result") - - -class Test__get_doc_mask(unittest.TestCase): - @staticmethod - 
def _call_fut(field_paths): - from google.cloud.firestore_v1beta1.client import _get_doc_mask - - return _get_doc_mask(field_paths) - - def test_none(self): - self.assertIsNone(self._call_fut(None)) - - def test_paths(self): - from google.cloud.firestore_v1beta1.types import common - - field_paths = ["a.b", "c"] - result = self._call_fut(field_paths) - expected = common.DocumentMask(field_paths=field_paths) - self.assertEqual(result, expected) - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_batch_response(**kwargs): - from google.cloud.firestore_v1beta1.types import firestore - - return firestore.BatchGetDocumentsResponse(**kwargs) - - -def _doc_get_info(ref_string, values): - from google.cloud.firestore_v1beta1.types import document - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.firestore_v1beta1 import _helpers - - now = datetime.datetime.utcnow() - read_time = _datetime_to_pb_timestamp(now) - delta = datetime.timedelta(seconds=100) - update_time = _datetime_to_pb_timestamp(now - delta) - create_time = _datetime_to_pb_timestamp(now - 2 * delta) - - document_pb = document.Document( - name=ref_string, - fields=_helpers.encode_dict(values), - create_time=create_time, - update_time=update_time, - ) - - return document_pb, read_time diff --git a/tests/unit/v1beta1/test_collection.py b/tests/unit/v1beta1/test_collection.py deleted file mode 100644 index 53e1dc2c3..000000000 --- a/tests/unit/v1beta1/test_collection.py +++ /dev/null @@ -1,605 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import types -import unittest - -import mock -import pytest -import six - - -class TestCollectionReference(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.collection import CollectionReference - - return CollectionReference - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - @staticmethod - def _get_public_methods(klass): - return set( - name - for name, value in six.iteritems(klass.__dict__) - if (not name.startswith("_") and isinstance(value, types.FunctionType)) - ) - - def test_query_method_matching(self): - from google.cloud.firestore_v1beta1.query import Query - - query_methods = self._get_public_methods(Query) - klass = self._get_target_class() - collection_methods = self._get_public_methods(klass) - # Make sure every query method is present on - # ``CollectionReference``. 
- self.assertLessEqual(query_methods, collection_methods) - - def test_constructor(self): - collection_id1 = "rooms" - document_id = "roomA" - collection_id2 = "messages" - client = mock.sentinel.client - - collection = self._make_one( - collection_id1, document_id, collection_id2, client=client - ) - self.assertIs(collection._client, client) - expected_path = (collection_id1, document_id, collection_id2) - self.assertEqual(collection._path, expected_path) - - def test_constructor_invalid_path(self): - with self.assertRaises(ValueError): - self._make_one() - with self.assertRaises(ValueError): - self._make_one(99, "doc", "bad-collection-id") - with self.assertRaises(ValueError): - self._make_one("bad-document-ID", None, "sub-collection") - with self.assertRaises(ValueError): - self._make_one("Just", "A-Document") - - def test_constructor_invalid_kwarg(self): - with self.assertRaises(TypeError): - self._make_one("Coh-lek-shun", donut=True) - - def test___eq___other_type(self): - client = mock.sentinel.client - collection = self._make_one("name", client=client) - other = object() - self.assertFalse(collection == other) - - def test___eq___different_path_same_client(self): - client = mock.sentinel.client - collection = self._make_one("name", client=client) - other = self._make_one("other", client=client) - self.assertFalse(collection == other) - - def test___eq___same_path_different_client(self): - client = mock.sentinel.client - other_client = mock.sentinel.other_client - collection = self._make_one("name", client=client) - other = self._make_one("name", client=other_client) - self.assertFalse(collection == other) - - def test___eq___same_path_same_client(self): - client = mock.sentinel.client - collection = self._make_one("name", client=client) - other = self._make_one("name", client=client) - self.assertTrue(collection == other) - - def test_id_property(self): - collection_id = "hi-bob" - collection = self._make_one(collection_id) - self.assertEqual(collection.id, collection_id) - - def test_parent_property(self): - from google.cloud.firestore_v1beta1.document import DocumentReference - - collection_id1 = "grocery-store" - document_id = "market" - collection_id2 = "darth" - client = _make_client() - collection = self._make_one( - collection_id1, document_id, collection_id2, client=client - ) - - parent = collection.parent - self.assertIsInstance(parent, DocumentReference) - self.assertIs(parent._client, client) - self.assertEqual(parent._path, (collection_id1, document_id)) - - def test_parent_property_top_level(self): - collection = self._make_one("tahp-leh-vull") - self.assertIsNone(collection.parent) - - def test_document_factory_explicit_id(self): - from google.cloud.firestore_v1beta1.document import DocumentReference - - collection_id = "grocery-store" - document_id = "market" - client = _make_client() - collection = self._make_one(collection_id, client=client) - - child = collection.document(document_id) - self.assertIsInstance(child, DocumentReference) - self.assertIs(child._client, client) - self.assertEqual(child._path, (collection_id, document_id)) - - @mock.patch( - "google.cloud.firestore_v1beta1.collection._auto_id", - return_value="zorpzorpthreezorp012", - ) - def test_document_factory_auto_id(self, mock_auto_id): - from google.cloud.firestore_v1beta1.document import DocumentReference - - collection_name = "space-town" - client = _make_client() - collection = self._make_one(collection_name, client=client) - - child = collection.document() - self.assertIsInstance(child, 
DocumentReference) - self.assertIs(child._client, client) - self.assertEqual(child._path, (collection_name, mock_auto_id.return_value)) - - mock_auto_id.assert_called_once_with() - - def test__parent_info_top_level(self): - client = _make_client() - collection_id = "soap" - collection = self._make_one(collection_id, client=client) - - parent_path, expected_prefix = collection._parent_info() - - expected_path = "projects/{}/databases/{}/documents".format( - client.project, client._database - ) - self.assertEqual(parent_path, expected_path) - prefix = "{}/{}".format(expected_path, collection_id) - self.assertEqual(expected_prefix, prefix) - - def test__parent_info_nested(self): - collection_id1 = "bar" - document_id = "baz" - collection_id2 = "chunk" - client = _make_client() - collection = self._make_one( - collection_id1, document_id, collection_id2, client=client - ) - - parent_path, expected_prefix = collection._parent_info() - - expected_path = "projects/{}/databases/{}/documents/{}/{}".format( - client.project, client._database, collection_id1, document_id - ) - self.assertEqual(parent_path, expected_path) - prefix = "{}/{}".format(expected_path, collection_id2) - self.assertEqual(expected_prefix, prefix) - - def test_add_auto_assigned(self): - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.document import DocumentReference - from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP - from google.cloud.firestore_v1beta1._helpers import pbs_for_set_no_merge - - # Create a minimal fake GAPIC and attach it to a real client. - firestore_api = mock.Mock(spec=["create_document", "commit"]) - write_result = mock.Mock( - update_time=mock.sentinel.update_time, spec=["update_time"] - ) - commit_response = mock.Mock( - write_results=[write_result], - spec=["write_results", "commit_time"], - commit_time=mock.sentinel.commit_time, - ) - firestore_api.commit.return_value = commit_response - create_doc_response = document.Document() - firestore_api.create_document.return_value = create_doc_response - client = _make_client() - client._firestore_api_internal = firestore_api - - # Actually make a collection. - collection = self._make_one("grand-parent", "parent", "child", client=client) - - # Add a dummy response for the fake GAPIC. - parent_path = collection.parent._document_path - auto_assigned_id = "cheezburger" - name = "{}/{}/{}".format(parent_path, collection.id, auto_assigned_id) - create_doc_response = document.Document(name=name) - create_doc_response._pb.update_time.FromDatetime(datetime.datetime.utcnow()) - firestore_api.create_document.return_value = create_doc_response - - # Actually call add() on our collection; include a transform to make - # sure transforms during adds work. - document_data = {"been": "here", "now": SERVER_TIMESTAMP} - update_time, document_ref = collection.add(document_data) - - # Verify the response and the mocks. - self.assertIs(update_time, mock.sentinel.update_time) - self.assertIsInstance(document_ref, DocumentReference) - self.assertIs(document_ref._client, client) - expected_path = collection._path + (auto_assigned_id,) - self.assertEqual(document_ref._path, expected_path) - - # TODO(microgen): For now relax test. 
- # Expected: create_document(request={'parent': 'projects/project-project/databases/(default)/documents/grand-parent/parent', 'collection_id': 'child', 'document': , 'document_id': None, 'mask': None}, metadata=[('google-cloud-resource-prefix', 'projects/project-project/databases/(default)')]) - # Actual: create_document(request={'parent': 'projects/project-project/databases/(default)/documents/grand-parent/parent', 'collection_id': 'child', 'document': None, 'document_id': , 'mask': None}, metadata=[('google-cloud-resource-prefix', 'projects/project-project/databases/(default)')]) - - # expected_document_pb = document.Document() - # firestore_api.create_document.assert_called_once_with( - # request={ - # "parent": parent_path, - # "collection_id": collection.id, - # "document": expected_document_pb, - # "document_id": None, - # "mask": None, - # }, - # metadata=client._rpc_metadata, - # ) - write_pbs = pbs_for_set_no_merge(document_ref._document_path, document_data) - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": write_pbs, - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - @staticmethod - def _write_pb_for_create(document_path, document_data): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1 import _helpers - - return write.Write( - update=document.Document( - name=document_path, fields=_helpers.encode_dict(document_data) - ), - current_document=common.Precondition(exists=False), - ) - - def test_add_explicit_id(self): - from google.cloud.firestore_v1beta1.document import DocumentReference - - # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=["commit"]) - write_result = mock.Mock( - update_time=mock.sentinel.update_time, spec=["update_time"] - ) - commit_response = mock.Mock( - write_results=[write_result], - spec=["write_results", "commit_time"], - commit_time=mock.sentinel.commit_time, - ) - firestore_api.commit.return_value = commit_response - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Actually make a collection and call add(). - collection = self._make_one("parent", client=client) - document_data = {"zorp": 208.75, "i-did-not": b"know that"} - doc_id = "child" - update_time, document_ref = collection.add(document_data, document_id=doc_id) - - # Verify the response and the mocks. 
- self.assertIs(update_time, mock.sentinel.update_time) - self.assertIsInstance(document_ref, DocumentReference) - self.assertIs(document_ref._client, client) - self.assertEqual(document_ref._path, (collection.id, doc_id)) - - write_pb = self._write_pb_for_create(document_ref._document_path, document_data) - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": [write_pb], - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_select(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - field_paths = ["a", "b"] - query = collection.select(field_paths) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - projection_paths = [ - field_ref.field_path for field_ref in query._projection.fields - ] - self.assertEqual(projection_paths, field_paths) - - @staticmethod - def _make_field_filter_pb(field_path, op_string, value): - from google.cloud.firestore_v1beta1.types import query - from google.cloud.firestore_v1beta1 import _helpers - from google.cloud.firestore_v1beta1.query import _enum_from_op_string - - return query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference(field_path=field_path), - op=_enum_from_op_string(op_string), - value=_helpers.encode_value(value), - ) - - def test_where(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - field_path = "foo" - op_string = "==" - value = 45 - query = collection.where(field_path, op_string, value) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - self.assertEqual(len(query._field_filters), 1) - field_filter_pb = query._field_filters[0] - self.assertEqual( - field_filter_pb, self._make_field_filter_pb(field_path, op_string, value) - ) - - @staticmethod - def _make_order_pb(field_path, direction): - from google.cloud.firestore_v1beta1.types import query - from google.cloud.firestore_v1beta1.query import _enum_from_direction - - return query.StructuredQuery.Order( - field=query.StructuredQuery.FieldReference(field_path=field_path), - direction=_enum_from_direction(direction), - ) - - def test_order_by(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - field_path = "foo" - direction = Query.DESCENDING - query = collection.order_by(field_path, direction=direction) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - self.assertEqual(len(query._orders), 1) - order_pb = query._orders[0] - self.assertEqual(order_pb, self._make_order_pb(field_path, direction)) - - def test_limit(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - limit = 15 - query = collection.limit(limit) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - self.assertEqual(query._limit, limit) - - def test_offset(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - offset = 113 - query = collection.offset(offset) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - self.assertEqual(query._offset, offset) - - def test_start_at(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - doc_fields = {"a": "b"} - query = collection.start_at(doc_fields) - - self.assertIsInstance(query, 
Query) - self.assertIs(query._parent, collection) - self.assertEqual(query._start_at, (doc_fields, True)) - - def test_start_after(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - doc_fields = {"d": "foo", "e": 10} - query = collection.start_after(doc_fields) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - self.assertEqual(query._start_at, (doc_fields, False)) - - def test_end_before(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - doc_fields = {"bar": 10.5} - query = collection.end_before(doc_fields) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - self.assertEqual(query._end_at, (doc_fields, True)) - - def test_end_at(self): - from google.cloud.firestore_v1beta1.query import Query - - collection = self._make_one("collection") - doc_fields = {"opportunity": True, "reason": 9} - query = collection.end_at(doc_fields) - - self.assertIsInstance(query, Query) - self.assertIs(query._parent, collection) - self.assertEqual(query._end_at, (doc_fields, False)) - - def _list_documents_helper(self, page_size=None): - from google.api_core.page_iterator import Iterator - from google.api_core.page_iterator import Page - from google.cloud.firestore_v1beta1.document import DocumentReference - from google.cloud.firestore_v1beta1.services.firestore.client import ( - FirestoreClient, - ) - from google.cloud.firestore_v1beta1.types.document import Document - - class _Iterator(Iterator): - def __init__(self, pages): - super(_Iterator, self).__init__(client=None) - self._pages = pages - - def _next_page(self): - if self._pages: - page, self._pages = self._pages[0], self._pages[1:] - return Page(self, page, self.item_to_value) - - client = _make_client() - template = client._database_string + "/documents/{}" - document_ids = ["doc-1", "doc-2"] - documents = [ - Document(name=template.format(document_id)) for document_id in document_ids - ] - iterator = _Iterator(pages=[documents]) - api_client = mock.create_autospec(FirestoreClient) - api_client.list_documents.return_value = iterator - client._firestore_api_internal = api_client - collection = self._make_one("collection", client=client) - - if page_size is not None: - documents = list(collection.list_documents(page_size)) - else: - documents = list(collection.list_documents()) - - # Verify the response and the mocks. 
- self.assertEqual(len(documents), len(document_ids)) - for document, document_id in zip(documents, document_ids): - self.assertIsInstance(document, DocumentReference) - self.assertEqual(document.parent, collection) - self.assertEqual(document.id, document_id) - - parent, _ = collection._parent_info() - api_client.list_documents.assert_called_once_with( - request={ - "parent": parent, - "collection_id": collection.id, - "page_size": page_size, - "page_token": True, - }, - metadata=client._rpc_metadata, - ) - - def test_list_documents_wo_page_size(self): - self._list_documents_helper() - - def test_list_documents_w_page_size(self): - self._list_documents_helper(page_size=25) - - @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True) - def test_get(self, query_class): - import warnings - - collection = self._make_one("collection") - with warnings.catch_warnings(record=True) as warned: - get_response = collection.get() - - query_class.assert_called_once_with(collection) - query_inst = query_class.return_value - self.assertIs(get_response, query_inst.stream.return_value) - query_inst.stream.assert_called_once_with(transaction=None) - - # Verify the deprecation - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, DeprecationWarning) - - @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True) - def test_get_with_transaction(self, query_class): - import warnings - - collection = self._make_one("collection") - transaction = mock.sentinel.txn - with warnings.catch_warnings(record=True) as warned: - get_response = collection.get(transaction=transaction) - - query_class.assert_called_once_with(collection) - query_inst = query_class.return_value - self.assertIs(get_response, query_inst.stream.return_value) - query_inst.stream.assert_called_once_with(transaction=transaction) - - # Verify the deprecation - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, DeprecationWarning) - - @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True) - def test_stream(self, query_class): - collection = self._make_one("collection") - stream_response = collection.stream() - - query_class.assert_called_once_with(collection) - query_inst = query_class.return_value - self.assertIs(stream_response, query_inst.stream.return_value) - query_inst.stream.assert_called_once_with(transaction=None) - - @mock.patch("google.cloud.firestore_v1beta1.query.Query", autospec=True) - def test_stream_with_transaction(self, query_class): - collection = self._make_one("collection") - transaction = mock.sentinel.txn - stream_response = collection.stream(transaction=transaction) - - query_class.assert_called_once_with(collection) - query_inst = query_class.return_value - self.assertIs(stream_response, query_inst.stream.return_value) - query_inst.stream.assert_called_once_with(transaction=transaction) - - @mock.patch("google.cloud.firestore_v1beta1.collection.Watch", autospec=True) - def test_on_snapshot(self, watch): - collection = self._make_one("collection") - collection.on_snapshot(None) - watch.for_query.assert_called_once() - - -class Test__auto_id(unittest.TestCase): - @staticmethod - def _call_fut(): - from google.cloud.firestore_v1beta1.collection import _auto_id - - return _auto_id() - - @mock.patch("random.choice") - def test_it(self, mock_rand_choice): - from google.cloud.firestore_v1beta1.collection import _AUTO_ID_CHARS - - mock_result = "0123456789abcdefghij" - mock_rand_choice.side_effect = list(mock_result) - result = self._call_fut() - 
self.assertEqual(result, mock_result) - - mock_calls = [mock.call(_AUTO_ID_CHARS)] * 20 - self.assertEqual(mock_rand_choice.mock_calls, mock_calls) - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_client(): - from google.cloud.firestore_v1beta1.client import Client - - credentials = _make_credentials() - with pytest.deprecated_call(): - return Client(project="project-project", credentials=credentials) diff --git a/tests/unit/v1beta1/test_document.py b/tests/unit/v1beta1/test_document.py deleted file mode 100644 index a009a6e23..000000000 --- a/tests/unit/v1beta1/test_document.py +++ /dev/null @@ -1,839 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections -import unittest - -import mock -import pytest -import datetime -import pytz - - -class TestDocumentReference(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.document import DocumentReference - - return DocumentReference - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor(self): - collection_id1 = "users" - document_id1 = "alovelace" - collection_id2 = "platform" - document_id2 = "*nix" - client = mock.MagicMock() - client.__hash__.return_value = 1234 - - document = self._make_one( - collection_id1, document_id1, collection_id2, document_id2, client=client - ) - self.assertIs(document._client, client) - expected_path = "/".join( - (collection_id1, document_id1, collection_id2, document_id2) - ) - self.assertEqual(document.path, expected_path) - - def test_constructor_invalid_path(self): - with self.assertRaises(ValueError): - self._make_one() - with self.assertRaises(ValueError): - self._make_one(None, "before", "bad-collection-id", "fifteen") - with self.assertRaises(ValueError): - self._make_one("bad-document-ID", None) - with self.assertRaises(ValueError): - self._make_one("Just", "A-Collection", "Sub") - - def test_constructor_invalid_kwarg(self): - with self.assertRaises(TypeError): - self._make_one("Coh-lek-shun", "Dahk-yu-mehnt", burger=18.75) - - def test___copy__(self): - client = _make_client("rain") - document = self._make_one("a", "b", client=client) - # Access the document path so it is copied. 
- doc_path = document._document_path - self.assertEqual(doc_path, document._document_path_internal) - - new_document = document.__copy__() - self.assertIsNot(new_document, document) - self.assertIs(new_document._client, document._client) - self.assertEqual(new_document._path, document._path) - self.assertEqual( - new_document._document_path_internal, document._document_path_internal - ) - - def test___deepcopy__calls_copy(self): - client = mock.sentinel.client - document = self._make_one("a", "b", client=client) - document.__copy__ = mock.Mock(return_value=mock.sentinel.new_doc, spec=[]) - - unused_memo = {} - new_document = document.__deepcopy__(unused_memo) - self.assertIs(new_document, mock.sentinel.new_doc) - document.__copy__.assert_called_once_with() - - def test__eq__same_type(self): - document1 = self._make_one("X", "YY", client=mock.sentinel.client) - document2 = self._make_one("X", "ZZ", client=mock.sentinel.client) - document3 = self._make_one("X", "YY", client=mock.sentinel.client2) - document4 = self._make_one("X", "YY", client=mock.sentinel.client) - - pairs = ((document1, document2), (document1, document3), (document2, document3)) - for candidate1, candidate2 in pairs: - # We use == explicitly since assertNotEqual would use !=. - equality_val = candidate1 == candidate2 - self.assertFalse(equality_val) - - # Check the only equal one. - self.assertEqual(document1, document4) - self.assertIsNot(document1, document4) - - def test__eq__other_type(self): - document = self._make_one("X", "YY", client=mock.sentinel.client) - other = object() - equality_val = document == other - self.assertFalse(equality_val) - self.assertIs(document.__eq__(other), NotImplemented) - - def test___hash__(self): - client = mock.MagicMock() - client.__hash__.return_value = 234566789 - document = self._make_one("X", "YY", client=client) - self.assertEqual(hash(document), hash(("X", "YY")) + hash(client)) - - def test__ne__same_type(self): - document1 = self._make_one("X", "YY", client=mock.sentinel.client) - document2 = self._make_one("X", "ZZ", client=mock.sentinel.client) - document3 = self._make_one("X", "YY", client=mock.sentinel.client2) - document4 = self._make_one("X", "YY", client=mock.sentinel.client) - - self.assertNotEqual(document1, document2) - self.assertNotEqual(document1, document3) - self.assertNotEqual(document2, document3) - - # We use != explicitly since assertEqual would use ==. - inequality_val = document1 != document4 - self.assertFalse(inequality_val) - self.assertIsNot(document1, document4) - - def test__ne__other_type(self): - document = self._make_one("X", "YY", client=mock.sentinel.client) - other = object() - self.assertNotEqual(document, other) - self.assertIs(document.__ne__(other), NotImplemented) - - def test__document_path_property(self): - project = "hi-its-me-ok-bye" - client = _make_client(project=project) - - collection_id = "then" - document_id = "090909iii" - document = self._make_one(collection_id, document_id, client=client) - doc_path = document._document_path - expected = "projects/{}/databases/{}/documents/{}/{}".format( - project, client._database, collection_id, document_id - ) - self.assertEqual(doc_path, expected) - self.assertIs(document._document_path_internal, doc_path) - - # Make sure value is cached. 
- document._document_path_internal = mock.sentinel.cached - self.assertIs(document._document_path, mock.sentinel.cached) - - def test__document_path_property_no_client(self): - document = self._make_one("hi", "bye") - self.assertIsNone(document._client) - with self.assertRaises(ValueError): - getattr(document, "_document_path") - - self.assertIsNone(document._document_path_internal) - - def test_id_property(self): - document_id = "867-5309" - document = self._make_one("Co-lek-shun", document_id) - self.assertEqual(document.id, document_id) - - def test_parent_property(self): - from google.cloud.firestore_v1beta1.collection import CollectionReference - - collection_id = "grocery-store" - document_id = "market" - client = _make_client() - document = self._make_one(collection_id, document_id, client=client) - - parent = document.parent - self.assertIsInstance(parent, CollectionReference) - self.assertIs(parent._client, client) - self.assertEqual(parent._path, (collection_id,)) - - def test_collection_factory(self): - from google.cloud.firestore_v1beta1.collection import CollectionReference - - collection_id = "grocery-store" - document_id = "market" - new_collection = "fruits" - client = _make_client() - document = self._make_one(collection_id, document_id, client=client) - - child = document.collection(new_collection) - self.assertIsInstance(child, CollectionReference) - self.assertIs(child._client, client) - self.assertEqual(child._path, (collection_id, document_id, new_collection)) - - @staticmethod - def _write_pb_for_create(document_path, document_data): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1 import _helpers - - return write.Write( - update=document.Document( - name=document_path, fields=_helpers.encode_dict(document_data) - ), - current_document=common.Precondition(exists=False), - ) - - @staticmethod - def _make_commit_repsonse(write_results=None): - from google.cloud.firestore_v1beta1.types import firestore - - response = mock.create_autospec(firestore.CommitResponse) - response.write_results = write_results or [mock.sentinel.write_result] - response.commit_time = mock.sentinel.commit_time - return response - - def test_create(self): - # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=["commit"]) - firestore_api.commit.return_value = self._make_commit_repsonse() - - # Attach the fake GAPIC to a real client. - client = _make_client("dignity") - client._firestore_api_internal = firestore_api - - # Actually make a document and call create(). - document = self._make_one("foo", "twelve", client=client) - document_data = {"hello": "goodbye", "count": 99} - write_result = document.create(document_data) - - # Verify the response and the mocks. - self.assertIs(write_result, mock.sentinel.write_result) - write_pb = self._write_pb_for_create(document._document_path, document_data) - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": [write_pb], - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_create_empty(self): - # Create a minimal fake GAPIC with a dummy response. 
- from google.cloud.firestore_v1beta1.document import DocumentReference - from google.cloud.firestore_v1beta1.document import DocumentSnapshot - - firestore_api = mock.Mock(spec=["commit"]) - document_reference = mock.create_autospec(DocumentReference) - snapshot = mock.create_autospec(DocumentSnapshot) - snapshot.exists = True - document_reference.get.return_value = snapshot - firestore_api.commit.return_value = self._make_commit_repsonse( - write_results=[document_reference] - ) - - # Attach the fake GAPIC to a real client. - client = _make_client("dignity") - client._firestore_api_internal = firestore_api - client.get_all = mock.MagicMock() - client.get_all.exists.return_value = True - - # Actually make a document and call create(). - document = self._make_one("foo", "twelve", client=client) - document_data = {} - write_result = document.create(document_data) - self.assertTrue(write_result.get().exists) - - @staticmethod - def _write_pb_for_set(document_path, document_data, merge): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1 import _helpers - - write_pbs = write.Write( - update=document.Document( - name=document_path, fields=_helpers.encode_dict(document_data) - ) - ) - if merge: - field_paths = [ - field_path - for field_path, value in _helpers.extract_fields( - document_data, _helpers.FieldPath() - ) - ] - field_paths = [ - field_path.to_api_repr() for field_path in sorted(field_paths) - ] - mask = common.DocumentMask(field_paths=sorted(field_paths)) - write_pbs._pb.update_mask.CopyFrom(mask._pb) - return write_pbs - - def _set_helper(self, merge=False, **option_kwargs): - # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=["commit"]) - firestore_api.commit.return_value = self._make_commit_repsonse() - - # Attach the fake GAPIC to a real client. - client = _make_client("db-dee-bee") - client._firestore_api_internal = firestore_api - - # Actually make a document and call create(). - document = self._make_one("User", "Interface", client=client) - document_data = {"And": 500, "Now": b"\xba\xaa\xaa \xba\xaa\xaa"} - write_result = document.set(document_data, merge) - - # Verify the response and the mocks. - self.assertIs(write_result, mock.sentinel.write_result) - write_pb = self._write_pb_for_set(document._document_path, document_data, merge) - - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": [write_pb], - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_set(self): - self._set_helper() - - def test_set_merge(self): - self._set_helper(merge=True) - - @staticmethod - def _write_pb_for_update(document_path, update_values, field_paths): - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1 import _helpers - - return write.Write( - update=document.Document( - name=document_path, fields=_helpers.encode_dict(update_values) - ), - update_mask=common.DocumentMask(field_paths=field_paths), - current_document=common.Precondition(exists=True), - ) - - def _update_helper(self, **option_kwargs): - from google.cloud.firestore_v1beta1.transforms import DELETE_FIELD - - # Create a minimal fake GAPIC with a dummy response. 
- firestore_api = mock.Mock(spec=["commit"]) - firestore_api.commit.return_value = self._make_commit_repsonse() - - # Attach the fake GAPIC to a real client. - client = _make_client("potato-chip") - client._firestore_api_internal = firestore_api - - # Actually make a document and call create(). - document = self._make_one("baked", "Alaska", client=client) - # "Cheat" and use OrderedDict-s so that iteritems() is deterministic. - field_updates = collections.OrderedDict( - (("hello", 1), ("then.do", False), ("goodbye", DELETE_FIELD)) - ) - if option_kwargs: - option = client.write_option(**option_kwargs) - write_result = document.update(field_updates, option=option) - else: - option = None - write_result = document.update(field_updates) - - # Verify the response and the mocks. - self.assertIs(write_result, mock.sentinel.write_result) - update_values = { - "hello": field_updates["hello"], - "then": {"do": field_updates["then.do"]}, - } - field_paths = list(field_updates.keys()) - write_pb = self._write_pb_for_update( - document._document_path, update_values, sorted(field_paths) - ) - if option is not None: - option.modify_write(write_pb) - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": [write_pb], - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_update_with_exists(self): - with self.assertRaises(ValueError): - self._update_helper(exists=True) - - def test_update(self): - self._update_helper() - - def test_update_with_precondition(self): - from google.protobuf import timestamp_pb2 - - timestamp = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244) - self._update_helper(last_update_time=timestamp) - - def test_empty_update(self): - # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=["commit"]) - firestore_api.commit.return_value = self._make_commit_repsonse() - - # Attach the fake GAPIC to a real client. - client = _make_client("potato-chip") - client._firestore_api_internal = firestore_api - - # Actually make a document and call create(). - document = self._make_one("baked", "Alaska", client=client) - # "Cheat" and use OrderedDict-s so that iteritems() is deterministic. - field_updates = {} - with self.assertRaises(ValueError): - document.update(field_updates) - - def _delete_helper(self, **option_kwargs): - from google.cloud.firestore_v1beta1.types import write - - # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=["commit"]) - firestore_api.commit.return_value = self._make_commit_repsonse() - - # Attach the fake GAPIC to a real client. - client = _make_client("donut-base") - client._firestore_api_internal = firestore_api - - # Actually make a document and call delete(). - document = self._make_one("where", "we-are", client=client) - if option_kwargs: - option = client.write_option(**option_kwargs) - delete_time = document.delete(option=option) - else: - option = None - delete_time = document.delete() - - # Verify the response and the mocks. 
- self.assertIs(delete_time, mock.sentinel.commit_time) - write_pb = write.Write(delete=document._document_path) - if option is not None: - option.modify_write(write_pb) - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": [write_pb], - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_delete(self): - self._delete_helper() - - def test_delete_with_option(self): - from google.protobuf import timestamp_pb2 - - timestamp_pb = timestamp_pb2.Timestamp(seconds=1058655101, nanos=100022244) - self._delete_helper(last_update_time=timestamp_pb) - - def _get_helper(self, field_paths=None, use_transaction=False, not_found=False): - from google.api_core.exceptions import NotFound - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.transaction import Transaction - - # Create a minimal fake GAPIC with a dummy response. - create_time = 123 - update_time = 234 - firestore_api = mock.Mock(spec=["get_document"]) - response = mock.create_autospec(document.Document) - response.fields = {} - response.create_time = create_time - response.update_time = update_time - - if not_found: - firestore_api.get_document.side_effect = NotFound("testing") - else: - firestore_api.get_document.return_value = response - - client = _make_client("donut-base") - client._firestore_api_internal = firestore_api - - document = self._make_one("where", "we-are", client=client) - - if use_transaction: - transaction = Transaction(client) - transaction_id = transaction._id = b"asking-me-2" - else: - transaction = None - - snapshot = document.get(field_paths=field_paths, transaction=transaction) - - self.assertIs(snapshot.reference, document) - if not_found: - self.assertIsNone(snapshot._data) - self.assertFalse(snapshot.exists) - self.assertIsNone(snapshot.read_time) - self.assertIsNone(snapshot.create_time) - self.assertIsNone(snapshot.update_time) - else: - self.assertEqual(snapshot.to_dict(), {}) - self.assertTrue(snapshot.exists) - self.assertIsNone(snapshot.read_time) - self.assertIs(snapshot.create_time, create_time) - self.assertIs(snapshot.update_time, update_time) - - # Verify the request made to the API - if field_paths is not None: - mask = common.DocumentMask(field_paths=sorted(field_paths)) - else: - mask = None - - if use_transaction: - expected_transaction_id = transaction_id - else: - expected_transaction_id = None - - firestore_api.get_document.assert_called_once_with( - request={ - "name": document._document_path, - "mask": mask, - "transaction": expected_transaction_id, - }, - metadata=client._rpc_metadata, - ) - - def test_get_not_found(self): - self._get_helper(not_found=True) - - def test_get_default(self): - self._get_helper() - - def test_get_w_string_field_path(self): - with self.assertRaises(ValueError): - self._get_helper(field_paths="foo") - - def test_get_with_field_path(self): - self._get_helper(field_paths=["foo"]) - - def test_get_with_multiple_field_paths(self): - self._get_helper(field_paths=["foo", "bar.baz"]) - - def test_get_with_transaction(self): - self._get_helper(use_transaction=True) - - def _collections_helper(self, page_size=None): - from google.api_core.page_iterator import Iterator - from google.api_core.page_iterator import Page - from google.cloud.firestore_v1beta1.collection import CollectionReference - from google.cloud.firestore_v1beta1.services.firestore.client import ( - FirestoreClient, - ) - - class 
_Iterator(Iterator): - def __init__(self, pages): - super(_Iterator, self).__init__(client=None) - self._pages = pages - - def _next_page(self): - if self._pages: - page, self._pages = self._pages[0], self._pages[1:] - return Page(self, page, self.item_to_value) - - collection_ids = ["coll-1", "coll-2"] - iterator = _Iterator(pages=[collection_ids]) - api_client = mock.create_autospec(FirestoreClient) - api_client.list_collection_ids.return_value = iterator - - client = _make_client() - client._firestore_api_internal = api_client - - # Actually make a document and call delete(). - document = self._make_one("where", "we-are", client=client) - if page_size is not None: - collections = list(document.collections(page_size=page_size)) - else: - collections = list(document.collections()) - - # Verify the response and the mocks. - self.assertEqual(len(collections), len(collection_ids)) - for collection, collection_id in zip(collections, collection_ids): - self.assertIsInstance(collection, CollectionReference) - self.assertEqual(collection.parent, document) - self.assertEqual(collection.id, collection_id) - - api_client.list_collection_ids.assert_called_once_with( - request={"parent": document._document_path, "page_size": page_size}, - metadata=client._rpc_metadata, - ) - - def test_collections_wo_page_size(self): - self._collections_helper() - - def test_collections_w_page_size(self): - self._collections_helper(page_size=10) - - @mock.patch("google.cloud.firestore_v1beta1.document.Watch", autospec=True) - def test_on_snapshot(self, watch): - client = mock.Mock(_database_string="sprinklez", spec=["_database_string"]) - document = self._make_one("yellow", "mellow", client=client) - document.on_snapshot(None) - watch.for_document.assert_called_once() - - -class TestDocumentSnapshot(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.document import DocumentSnapshot - - return DocumentSnapshot - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def _make_reference(self, *args, **kwargs): - from google.cloud.firestore_v1beta1.document import DocumentReference - - return DocumentReference(*args, **kwargs) - - def _make_w_ref(self, ref_path=("a", "b"), data={}, exists=True): - client = mock.sentinel.client - reference = self._make_reference(*ref_path, client=client) - return self._make_one( - reference, - data, - exists, - mock.sentinel.read_time, - mock.sentinel.create_time, - mock.sentinel.update_time, - ) - - def test_constructor(self): - client = mock.sentinel.client - reference = self._make_reference("hi", "bye", client=client) - data = {"zoop": 83} - snapshot = self._make_one( - reference, - data, - True, - mock.sentinel.read_time, - mock.sentinel.create_time, - mock.sentinel.update_time, - ) - self.assertIs(snapshot._reference, reference) - self.assertEqual(snapshot._data, data) - self.assertIsNot(snapshot._data, data) # Make sure copied. 
- self.assertTrue(snapshot._exists) - self.assertIs(snapshot.read_time, mock.sentinel.read_time) - self.assertIs(snapshot.create_time, mock.sentinel.create_time) - self.assertIs(snapshot.update_time, mock.sentinel.update_time) - - def test___eq___other_type(self): - snapshot = self._make_w_ref() - other = object() - self.assertFalse(snapshot == other) - - def test___eq___different_reference_same_data(self): - snapshot = self._make_w_ref(("a", "b")) - other = self._make_w_ref(("c", "d")) - self.assertFalse(snapshot == other) - - def test___eq___same_reference_different_data(self): - snapshot = self._make_w_ref(("a", "b")) - other = self._make_w_ref(("a", "b"), {"foo": "bar"}) - self.assertFalse(snapshot == other) - - def test___eq___same_reference_same_data(self): - snapshot = self._make_w_ref(("a", "b"), {"foo": "bar"}) - other = self._make_w_ref(("a", "b"), {"foo": "bar"}) - self.assertTrue(snapshot == other) - - def test___hash__(self): - client = mock.MagicMock() - client.__hash__.return_value = 234566789 - reference = self._make_reference("hi", "bye", client=client) - data = {"zoop": 83} - update_time = datetime.datetime.fromtimestamp(123456, pytz.utc) - snapshot = self._make_one( - reference, data, True, None, mock.sentinel.create_time, update_time - ) - self.assertEqual(hash(snapshot), hash(reference) + hash(123456) + hash(0)) - - def test__client_property(self): - reference = self._make_reference( - "ok", "fine", "now", "fore", client=mock.sentinel.client - ) - snapshot = self._make_one(reference, {}, False, None, None, None) - self.assertIs(snapshot._client, mock.sentinel.client) - - def test_exists_property(self): - reference = mock.sentinel.reference - - snapshot1 = self._make_one(reference, {}, False, None, None, None) - self.assertFalse(snapshot1.exists) - snapshot2 = self._make_one(reference, {}, True, None, None, None) - self.assertTrue(snapshot2.exists) - - def test_id_property(self): - document_id = "around" - reference = self._make_reference( - "look", document_id, client=mock.sentinel.client - ) - snapshot = self._make_one(reference, {}, True, None, None, None) - self.assertEqual(snapshot.id, document_id) - self.assertEqual(reference.id, document_id) - - def test_reference_property(self): - snapshot = self._make_one(mock.sentinel.reference, {}, True, None, None, None) - self.assertIs(snapshot.reference, mock.sentinel.reference) - - def test_get(self): - data = {"one": {"bold": "move"}} - snapshot = self._make_one(None, data, True, None, None, None) - - first_read = snapshot.get("one") - second_read = snapshot.get("one") - self.assertEqual(first_read, data.get("one")) - self.assertIsNot(first_read, data.get("one")) - self.assertEqual(first_read, second_read) - self.assertIsNot(first_read, second_read) - - with self.assertRaises(KeyError): - snapshot.get("two") - - def test_nonexistent_snapshot(self): - snapshot = self._make_one(None, None, False, None, None, None) - self.assertIsNone(snapshot.get("one")) - - def test_to_dict(self): - data = {"a": 10, "b": ["definitely", "mutable"], "c": {"45": 50}} - snapshot = self._make_one(None, data, True, None, None, None) - as_dict = snapshot.to_dict() - self.assertEqual(as_dict, data) - self.assertIsNot(as_dict, data) - # Check that the data remains unchanged. 
- as_dict["b"].append("hi") - self.assertEqual(data, snapshot.to_dict()) - self.assertNotEqual(data, as_dict) - - def test_non_existent(self): - snapshot = self._make_one(None, None, False, None, None, None) - as_dict = snapshot.to_dict() - self.assertIsNone(as_dict) - - -class Test__get_document_path(unittest.TestCase): - @staticmethod - def _call_fut(client, path): - from google.cloud.firestore_v1beta1.document import _get_document_path - - return _get_document_path(client, path) - - def test_it(self): - project = "prah-jekt" - client = _make_client(project=project) - path = ("Some", "Document", "Child", "Shockument") - document_path = self._call_fut(client, path) - - expected = "projects/{}/databases/{}/documents/{}".format( - project, client._database, "/".join(path) - ) - self.assertEqual(document_path, expected) - - -class Test__consume_single_get(unittest.TestCase): - @staticmethod - def _call_fut(response_iterator): - from google.cloud.firestore_v1beta1.document import _consume_single_get - - return _consume_single_get(response_iterator) - - def test_success(self): - response_iterator = iter([mock.sentinel.result]) - result = self._call_fut(response_iterator) - self.assertIs(result, mock.sentinel.result) - - def test_failure_not_enough(self): - response_iterator = iter([]) - with self.assertRaises(ValueError): - self._call_fut(response_iterator) - - def test_failure_too_many(self): - response_iterator = iter([None, None]) - with self.assertRaises(ValueError): - self._call_fut(response_iterator) - - -class Test__first_write_result(unittest.TestCase): - @staticmethod - def _call_fut(write_results): - from google.cloud.firestore_v1beta1.document import _first_write_result - - return _first_write_result(write_results) - - def test_success(self): - from google.protobuf import timestamp_pb2 - from google.cloud.firestore_v1beta1.types import write - - single_result = write.WriteResult( - update_time=timestamp_pb2.Timestamp(seconds=1368767504, nanos=458000123) - ) - write_results = [single_result] - result = self._call_fut(write_results) - self.assertIs(result, single_result) - - def test_failure_not_enough(self): - write_results = [] - with self.assertRaises(ValueError): - self._call_fut(write_results) - - def test_more_than_one(self): - from google.cloud.firestore_v1beta1.types import write - - result1 = write.WriteResult() - result2 = write.WriteResult() - write_results = [result1, result2] - result = self._call_fut(write_results) - self.assertIs(result, result1) - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_client(project="project-project"): - from google.cloud.firestore_v1beta1.client import Client - - credentials = _make_credentials() - - with pytest.deprecated_call(): - return Client(project=project, credentials=credentials) diff --git a/tests/unit/v1beta1/test_field_path.py b/tests/unit/v1beta1/test_field_path.py deleted file mode 100644 index 22f314e61..000000000 --- a/tests/unit/v1beta1/test_field_path.py +++ /dev/null @@ -1,495 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class Test__tokenize_field_path(unittest.TestCase): - @staticmethod - def _call_fut(path): - from google.cloud.firestore_v1beta1 import field_path - - return field_path._tokenize_field_path(path) - - def _expect(self, path, split_path): - self.assertEqual(list(self._call_fut(path)), split_path) - - def test_w_empty(self): - self._expect("", []) - - def test_w_single_dot(self): - self._expect(".", ["."]) - - def test_w_single_simple(self): - self._expect("abc", ["abc"]) - - def test_w_single_quoted(self): - self._expect("`c*de`", ["`c*de`"]) - - def test_w_quoted_embedded_dot(self): - self._expect("`c*.de`", ["`c*.de`"]) - - def test_w_quoted_escaped_backtick(self): - self._expect(r"`c*\`de`", [r"`c*\`de`"]) - - def test_w_dotted_quoted(self): - self._expect("`*`.`~`", ["`*`", ".", "`~`"]) - - def test_w_dotted(self): - self._expect("a.b.`c*de`", ["a", ".", "b", ".", "`c*de`"]) - - def test_w_dotted_escaped(self): - self._expect("_0.`1`.`+2`", ["_0", ".", "`1`", ".", "`+2`"]) - - def test_w_unconsumed_characters(self): - path = "a~b" - with self.assertRaises(ValueError): - list(self._call_fut(path)) - - -class Test_split_field_path(unittest.TestCase): - @staticmethod - def _call_fut(path): - from google.cloud.firestore_v1beta1 import field_path - - return field_path.split_field_path(path) - - def test_w_single_dot(self): - with self.assertRaises(ValueError): - self._call_fut(".") - - def test_w_leading_dot(self): - with self.assertRaises(ValueError): - self._call_fut(".a.b.c") - - def test_w_trailing_dot(self): - with self.assertRaises(ValueError): - self._call_fut("a.b.") - - def test_w_missing_dot(self): - with self.assertRaises(ValueError): - self._call_fut("a`c*de`f") - - def test_w_half_quoted_field(self): - with self.assertRaises(ValueError): - self._call_fut("`c*de") - - def test_w_empty(self): - self.assertEqual(self._call_fut(""), []) - - def test_w_simple_field(self): - self.assertEqual(self._call_fut("a"), ["a"]) - - def test_w_dotted_field(self): - self.assertEqual(self._call_fut("a.b.cde"), ["a", "b", "cde"]) - - def test_w_quoted_field(self): - self.assertEqual(self._call_fut("a.b.`c*de`"), ["a", "b", "`c*de`"]) - - def test_w_quoted_field_escaped_backtick(self): - self.assertEqual(self._call_fut(r"`c*\`de`"), [r"`c*\`de`"]) - - -class Test_parse_field_path(unittest.TestCase): - @staticmethod - def _call_fut(path): - from google.cloud.firestore_v1beta1 import field_path - - return field_path.parse_field_path(path) - - def test_wo_escaped_names(self): - self.assertEqual(self._call_fut("a.b.c"), ["a", "b", "c"]) - - def test_w_escaped_backtick(self): - self.assertEqual(self._call_fut("`a\\`b`.c.d"), ["a`b", "c", "d"]) - - def test_w_escaped_backslash(self): - self.assertEqual(self._call_fut("`a\\\\b`.c.d"), ["a\\b", "c", "d"]) - - def test_w_first_name_escaped_wo_closing_backtick(self): - with self.assertRaises(ValueError): - self._call_fut("`a\\`b.c.d") - - -class Test_render_field_path(unittest.TestCase): - @staticmethod - def _call_fut(field_names): - from google.cloud.firestore_v1beta1 import field_path - - return 
field_path.render_field_path(field_names) - - def test_w_empty(self): - self.assertEqual(self._call_fut([]), "") - - def test_w_one_simple(self): - self.assertEqual(self._call_fut(["a"]), "a") - - def test_w_one_starts_w_digit(self): - self.assertEqual(self._call_fut(["0abc"]), "`0abc`") - - def test_w_one_w_non_alphanum(self): - self.assertEqual(self._call_fut(["a b c"]), "`a b c`") - - def test_w_one_w_backtick(self): - self.assertEqual(self._call_fut(["a`b"]), "`a\\`b`") - - def test_w_one_w_backslash(self): - self.assertEqual(self._call_fut(["a\\b"]), "`a\\\\b`") - - def test_multiple(self): - self.assertEqual(self._call_fut(["a", "b", "c"]), "a.b.c") - - -class Test_get_nested_value(unittest.TestCase): - - DATA = { - "top1": {"middle2": {"bottom3": 20, "bottom4": 22}, "middle5": True}, - "top6": b"\x00\x01 foo", - } - - @staticmethod - def _call_fut(path, data): - from google.cloud.firestore_v1beta1 import field_path - - return field_path.get_nested_value(path, data) - - def test_simple(self): - self.assertIs(self._call_fut("top1", self.DATA), self.DATA["top1"]) - - def test_nested(self): - self.assertIs( - self._call_fut("top1.middle2", self.DATA), self.DATA["top1"]["middle2"] - ) - self.assertIs( - self._call_fut("top1.middle2.bottom3", self.DATA), - self.DATA["top1"]["middle2"]["bottom3"], - ) - - def test_missing_top_level(self): - from google.cloud.firestore_v1beta1.field_path import _FIELD_PATH_MISSING_TOP - - field_path = "top8" - with self.assertRaises(KeyError) as exc_info: - self._call_fut(field_path, self.DATA) - - err_msg = _FIELD_PATH_MISSING_TOP.format(field_path) - self.assertEqual(exc_info.exception.args, (err_msg,)) - - def test_missing_key(self): - from google.cloud.firestore_v1beta1.field_path import _FIELD_PATH_MISSING_KEY - - with self.assertRaises(KeyError) as exc_info: - self._call_fut("top1.middle2.nope", self.DATA) - - err_msg = _FIELD_PATH_MISSING_KEY.format("nope", "top1.middle2") - self.assertEqual(exc_info.exception.args, (err_msg,)) - - def test_bad_type(self): - from google.cloud.firestore_v1beta1.field_path import _FIELD_PATH_WRONG_TYPE - - with self.assertRaises(KeyError) as exc_info: - self._call_fut("top6.middle7", self.DATA) - - err_msg = _FIELD_PATH_WRONG_TYPE.format("top6", "middle7") - self.assertEqual(exc_info.exception.args, (err_msg,)) - - -class TestFieldPath(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1 import field_path - - return field_path.FieldPath - - def _make_one(self, *args): - klass = self._get_target_class() - return klass(*args) - - def test_ctor_w_none_in_part(self): - with self.assertRaises(ValueError): - self._make_one("a", None, "b") - - def test_ctor_w_empty_string_in_part(self): - with self.assertRaises(ValueError): - self._make_one("a", "", "b") - - def test_ctor_w_integer_part(self): - with self.assertRaises(ValueError): - self._make_one("a", 3, "b") - - def test_ctor_w_list(self): - parts = ["a", "b", "c"] - with self.assertRaises(ValueError): - self._make_one(parts) - - def test_ctor_w_tuple(self): - parts = ("a", "b", "c") - with self.assertRaises(ValueError): - self._make_one(parts) - - def test_ctor_w_iterable_part(self): - with self.assertRaises(ValueError): - self._make_one("a", ["a"], "b") - - def test_constructor_w_single_part(self): - field_path = self._make_one("a") - self.assertEqual(field_path.parts, ("a",)) - - def test_constructor_w_multiple_parts(self): - field_path = self._make_one("a", "b", "c") - self.assertEqual(field_path.parts, ("a", "b", "c")) - 
- def test_ctor_w_invalid_chars_in_part(self): - invalid_parts = ("~", "*", "/", "[", "]", ".") - for invalid_part in invalid_parts: - field_path = self._make_one(invalid_part) - self.assertEqual(field_path.parts, (invalid_part,)) - - def test_ctor_w_double_dots(self): - field_path = self._make_one("a..b") - self.assertEqual(field_path.parts, ("a..b",)) - - def test_ctor_w_unicode(self): - field_path = self._make_one("一", "二", "三") - self.assertEqual(field_path.parts, ("一", "二", "三")) - - def test_from_api_repr_w_empty_string(self): - api_repr = "" - with self.assertRaises(ValueError): - self._get_target_class().from_api_repr(api_repr) - - def test_from_api_repr_w_empty_field_name(self): - api_repr = "a..b" - with self.assertRaises(ValueError): - self._get_target_class().from_api_repr(api_repr) - - def test_from_api_repr_w_invalid_chars(self): - invalid_parts = ("~", "*", "/", "[", "]", ".") - for invalid_part in invalid_parts: - with self.assertRaises(ValueError): - self._get_target_class().from_api_repr(invalid_part) - - def test_from_api_repr_w_ascii_single(self): - api_repr = "a" - field_path = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(field_path.parts, ("a",)) - - def test_from_api_repr_w_ascii_dotted(self): - api_repr = "a.b.c" - field_path = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(field_path.parts, ("a", "b", "c")) - - def test_from_api_repr_w_non_ascii_dotted_non_quoted(self): - api_repr = "a.一" - with self.assertRaises(ValueError): - self._get_target_class().from_api_repr(api_repr) - - def test_from_api_repr_w_non_ascii_dotted_quoted(self): - api_repr = "a.`一`" - field_path = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(field_path.parts, ("a", "一")) - - def test_from_string_w_empty_string(self): - path_string = "" - with self.assertRaises(ValueError): - self._get_target_class().from_string(path_string) - - def test_from_string_w_empty_field_name(self): - path_string = "a..b" - with self.assertRaises(ValueError): - self._get_target_class().from_string(path_string) - - def test_from_string_w_leading_dot(self): - path_string = ".b.c" - with self.assertRaises(ValueError): - self._get_target_class().from_string(path_string) - - def test_from_string_w_trailing_dot(self): - path_string = "a.b." 
- with self.assertRaises(ValueError): - self._get_target_class().from_string(path_string) - - def test_from_string_w_leading_invalid_chars(self): - invalid_paths = ("~", "*", "/", "[", "]") - for invalid_path in invalid_paths: - field_path = self._get_target_class().from_string(invalid_path) - self.assertEqual(field_path.parts, (invalid_path,)) - - def test_from_string_w_embedded_invalid_chars(self): - invalid_paths = ("a~b", "x*y", "f/g", "h[j", "k]l") - for invalid_path in invalid_paths: - with self.assertRaises(ValueError): - self._get_target_class().from_string(invalid_path) - - def test_from_string_w_ascii_single(self): - path_string = "a" - field_path = self._get_target_class().from_string(path_string) - self.assertEqual(field_path.parts, ("a",)) - - def test_from_string_w_ascii_dotted(self): - path_string = "a.b.c" - field_path = self._get_target_class().from_string(path_string) - self.assertEqual(field_path.parts, ("a", "b", "c")) - - def test_from_string_w_non_ascii_dotted(self): - path_string = "a.一" - field_path = self._get_target_class().from_string(path_string) - self.assertEqual(field_path.parts, ("a", "一")) - - def test___hash___w_single_part(self): - field_path = self._make_one("a") - self.assertEqual(hash(field_path), hash("a")) - - def test___hash___w_multiple_parts(self): - field_path = self._make_one("a", "b") - self.assertEqual(hash(field_path), hash("a.b")) - - def test___hash___w_escaped_parts(self): - field_path = self._make_one("a", "3") - self.assertEqual(hash(field_path), hash("a.`3`")) - - def test___eq___w_matching_type(self): - field_path = self._make_one("a", "b") - string_path = self._get_target_class().from_string("a.b") - self.assertEqual(field_path, string_path) - - def test___eq___w_non_matching_type(self): - field_path = self._make_one("a", "c") - other = mock.Mock() - other.parts = "a", "b" - self.assertNotEqual(field_path, other) - - def test___lt___w_matching_type(self): - field_path = self._make_one("a", "b") - string_path = self._get_target_class().from_string("a.c") - self.assertTrue(field_path < string_path) - - def test___lt___w_non_matching_type(self): - field_path = self._make_one("a", "b") - other = object() - # Python 2 doesn't raise TypeError here, but Python3 does. - self.assertIs(field_path.__lt__(other), NotImplemented) - - def test___add__(self): - path1 = "a123", "b456" - path2 = "c789", "d012" - path3 = "c789.d012" - field_path1 = self._make_one(*path1) - field_path1_string = self._make_one(*path1) - field_path2 = self._make_one(*path2) - field_path1 += field_path2 - field_path1_string += path3 - field_path2 = field_path2 + self._make_one(*path1) - self.assertEqual(field_path1, self._make_one(*(path1 + path2))) - self.assertEqual(field_path2, self._make_one(*(path2 + path1))) - self.assertEqual(field_path1_string, field_path1) - self.assertNotEqual(field_path1, field_path2) - with self.assertRaises(TypeError): - field_path1 + 305 - - def test_to_api_repr_a(self): - parts = "a" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), "a") - - def test_to_api_repr_backtick(self): - parts = "`" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), r"`\``") - - def test_to_api_repr_dot(self): - parts = "." 
- field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), "`.`") - - def test_to_api_repr_slash(self): - parts = "\\" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), r"`\\`") - - def test_to_api_repr_double_slash(self): - parts = r"\\" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), r"`\\\\`") - - def test_to_api_repr_underscore(self): - parts = "_33132" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), "_33132") - - def test_to_api_repr_unicode_non_simple(self): - parts = "一" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), "`一`") - - def test_to_api_repr_number_non_simple(self): - parts = "03" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), "`03`") - - def test_to_api_repr_simple_with_dot(self): - field_path = self._make_one("a.b") - self.assertEqual(field_path.to_api_repr(), "`a.b`") - - def test_to_api_repr_non_simple_with_dot(self): - parts = "a.一" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), "`a.一`") - - def test_to_api_repr_simple(self): - parts = "a0332432" - field_path = self._make_one(parts) - self.assertEqual(field_path.to_api_repr(), "a0332432") - - def test_to_api_repr_chain(self): - parts = "a", "`", "\\", "_3", "03", "a03", "\\\\", "a0332432", "一" - field_path = self._make_one(*parts) - self.assertEqual( - field_path.to_api_repr(), r"a.`\``.`\\`._3.`03`.a03.`\\\\`.a0332432.`一`" - ) - - def test_eq_or_parent_same(self): - field_path = self._make_one("a", "b") - other = self._make_one("a", "b") - self.assertTrue(field_path.eq_or_parent(other)) - - def test_eq_or_parent_prefix(self): - field_path = self._make_one("a", "b") - other = self._make_one("a", "b", "c") - self.assertTrue(field_path.eq_or_parent(other)) - self.assertTrue(other.eq_or_parent(field_path)) - - def test_eq_or_parent_no_prefix(self): - field_path = self._make_one("a", "b") - other = self._make_one("d", "e", "f") - self.assertFalse(field_path.eq_or_parent(other)) - self.assertFalse(other.eq_or_parent(field_path)) - - def test_lineage_empty(self): - field_path = self._make_one() - expected = set() - self.assertEqual(field_path.lineage(), expected) - - def test_lineage_single(self): - field_path = self._make_one("a") - expected = set() - self.assertEqual(field_path.lineage(), expected) - - def test_lineage_nested(self): - field_path = self._make_one("a", "b", "c") - expected = set([self._make_one("a"), self._make_one("a", "b")]) - self.assertEqual(field_path.lineage(), expected) diff --git a/tests/unit/v1beta1/test_order.py b/tests/unit/v1beta1/test_order.py deleted file mode 100644 index 2516b9421..000000000 --- a/tests/unit/v1beta1/test_order.py +++ /dev/null @@ -1,247 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http:#www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import mock -import six -import unittest - -from google.cloud.firestore_v1beta1._helpers import encode_value, GeoPoint -from google.cloud.firestore_v1beta1.order import Order -from google.cloud.firestore_v1beta1.order import TypeOrder - -from google.cloud.firestore_v1beta1.types import document - -from google.protobuf import timestamp_pb2 - - -class TestOrder(unittest.TestCase): - - if six.PY2: - assertRaisesRegex = unittest.TestCase.assertRaisesRegexp - - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.order import Order - - return Order - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_order(self): - # Constants used to represent min/max values of storage types. - int_max_value = 2 ** 31 - 1 - int_min_value = -(2 ** 31) - float_min_value = 1.175494351 ** -38 - float_nan = float("nan") - inf = float("inf") - - groups = [None] * 65 - - groups[0] = [nullValue()] - - groups[1] = [_boolean_value(False)] - groups[2] = [_boolean_value(True)] - - # numbers - groups[3] = [_double_value(float_nan), _double_value(float_nan)] - groups[4] = [_double_value(-inf)] - groups[5] = [_int_value(int_min_value - 1)] - groups[6] = [_int_value(int_min_value)] - groups[7] = [_double_value(-1.1)] - # Integers and Doubles order the same. - groups[8] = [_int_value(-1), _double_value(-1.0)] - groups[9] = [_double_value(-float_min_value)] - # zeros all compare the same. - groups[10] = [ - _int_value(0), - _double_value(-0.0), - _double_value(0.0), - _double_value(+0.0), - ] - groups[11] = [_double_value(float_min_value)] - groups[12] = [_int_value(1), _double_value(1.0)] - groups[13] = [_double_value(1.1)] - groups[14] = [_int_value(int_max_value)] - groups[15] = [_int_value(int_max_value + 1)] - groups[16] = [_double_value(inf)] - - groups[17] = [_timestamp_value(123, 0)] - groups[18] = [_timestamp_value(123, 123)] - groups[19] = [_timestamp_value(345, 0)] - - # strings - groups[20] = [_string_value("")] - groups[21] = [_string_value("\u0000\ud7ff\ue000\uffff")] - groups[22] = [_string_value("(╯°□°)╯︵ ┻━┻")] - groups[23] = [_string_value("a")] - groups[24] = [_string_value("abc def")] - # latin small letter e + combining acute accent + latin small letter b - groups[25] = [_string_value("e\u0301b")] - groups[26] = [_string_value("æ")] - # latin small letter e with acute accent + latin small letter a - groups[27] = [_string_value("\u00e9a")] - - # blobs - groups[28] = [_blob_value(b"")] - groups[29] = [_blob_value(b"\x00")] - groups[30] = [_blob_value(b"\x00\x01\x02\x03\x04")] - groups[31] = [_blob_value(b"\x00\x01\x02\x04\x03")] - groups[32] = [_blob_value(b"\x7f")] - - # resource names - groups[33] = [_reference_value("projects/p1/databases/d1/documents/c1/doc1")] - groups[34] = [_reference_value("projects/p1/databases/d1/documents/c1/doc2")] - groups[35] = [ - _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc1") - ] - groups[36] = [ - _reference_value("projects/p1/databases/d1/documents/c1/doc2/c2/doc2") - ] - groups[37] = [_reference_value("projects/p1/databases/d1/documents/c10/doc1")] - groups[38] = [_reference_value("projects/p1/databases/d1/documents/c2/doc1")] - groups[39] = [_reference_value("projects/p2/databases/d2/documents/c1/doc1")] - groups[40] = [_reference_value("projects/p2/databases/d2/documents/c1-/doc1")] - groups[41] = [_reference_value("projects/p2/databases/d3/documents/c1-/doc1")] - - # geo points - groups[42] = [_geoPoint_value(-90, -180)] - groups[43] = 
[_geoPoint_value(-90, 0)] - groups[44] = [_geoPoint_value(-90, 180)] - groups[45] = [_geoPoint_value(0, -180)] - groups[46] = [_geoPoint_value(0, 0)] - groups[47] = [_geoPoint_value(0, 180)] - groups[48] = [_geoPoint_value(1, -180)] - groups[49] = [_geoPoint_value(1, 0)] - groups[50] = [_geoPoint_value(1, 180)] - groups[51] = [_geoPoint_value(90, -180)] - groups[52] = [_geoPoint_value(90, 0)] - groups[53] = [_geoPoint_value(90, 180)] - - # arrays - groups[54] = [_array_value()] - groups[55] = [_array_value(["bar"])] - groups[56] = [_array_value(["foo"])] - groups[57] = [_array_value(["foo", 0])] - groups[58] = [_array_value(["foo", 1])] - groups[59] = [_array_value(["foo", "0"])] - - # objects - groups[60] = [_object_value({"bar": 0})] - groups[61] = [_object_value({"bar": 0, "foo": 1})] - groups[62] = [_object_value({"bar": 1})] - groups[63] = [_object_value({"bar": 2})] - groups[64] = [_object_value({"bar": "0"})] - - target = self._make_one() - - for i in range(len(groups)): - for left in groups[i]: - for j in range(len(groups)): - for right in groups[j]: - expected = Order._compare_to(i, j) - - self.assertEqual( - target.compare(left, right), - expected, - "comparing L->R {} ({}) to {} ({})".format( - i, left, j, right - ), - ) - - expected = Order._compare_to(j, i) - self.assertEqual( - target.compare(right, left), - expected, - "comparing R->L {} ({}) to {} ({})".format( - j, right, i, left - ), - ) - - def test_typeorder_type_failure(self): - target = self._make_one() - left = mock.Mock() - left.WhichOneof.return_value = "imaginary-type" - - with self.assertRaisesRegex(ValueError, "Could not detect value"): - target.compare(left, mock.Mock()) - - def test_failure_to_find_type(self): - target = self._make_one() - left = mock.Mock() - left.WhichOneof.return_value = "imaginary-type" - right = mock.Mock() - # Patch from value to get to the deep compare. Since left is a bad type - # expect this to fail with value error. - with mock.patch.object(TypeOrder, "from_value") as to: - to.value = None - with self.assertRaisesRegex(ValueError, "Unknown ``value_type``"): - target.compare(left, right) - - def test_compare_objects_different_keys(self): - left = _object_value({"foo": 0}) - right = _object_value({"bar": 0}) - - target = self._make_one() - target.compare(left, right) - - -def _boolean_value(b): - return encode_value(b) - - -def _double_value(d): - return encode_value(d) - - -def _int_value(value): - return encode_value(value) - - -def _string_value(s): - if not isinstance(s, six.text_type): - s = six.u(s) - return encode_value(s) - - -def _reference_value(r): - return document.Value(reference_value=r) - - -def _blob_value(b): - return encode_value(b) - - -def nullValue(): - return encode_value(None) - - -def _timestamp_value(seconds, nanos): - return document.Value( - timestamp_value=timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos) - ) - - -def _geoPoint_value(latitude, longitude): - return encode_value(GeoPoint(latitude, longitude)) - - -def _array_value(values=[]): - return encode_value(values) - - -def _object_value(keysAndValues): - return encode_value(keysAndValues) diff --git a/tests/unit/v1beta1/test_query.py b/tests/unit/v1beta1/test_query.py deleted file mode 100644 index 30df155d6..000000000 --- a/tests/unit/v1beta1/test_query.py +++ /dev/null @@ -1,1601 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import types -import unittest - -import mock -import pytest -import six - - -class TestQuery(unittest.TestCase): - - if six.PY2: - assertRaisesRegex = unittest.TestCase.assertRaisesRegexp - - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.query import Query - - return Query - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor_defaults(self): - query = self._make_one(mock.sentinel.parent) - self.assertIs(query._parent, mock.sentinel.parent) - self.assertIsNone(query._projection) - self.assertEqual(query._field_filters, ()) - self.assertEqual(query._orders, ()) - self.assertIsNone(query._limit) - self.assertIsNone(query._offset) - self.assertIsNone(query._start_at) - self.assertIsNone(query._end_at) - - def _make_one_all_fields(self, limit=9876, offset=12, skip_fields=(), parent=None): - kwargs = { - "projection": mock.sentinel.projection, - "field_filters": mock.sentinel.filters, - "orders": mock.sentinel.orders, - "limit": limit, - "offset": offset, - "start_at": mock.sentinel.start_at, - "end_at": mock.sentinel.end_at, - } - for field in skip_fields: - kwargs.pop(field) - if parent is None: - parent = mock.sentinel.parent - return self._make_one(parent, **kwargs) - - def test_constructor_explicit(self): - limit = 234 - offset = 56 - query = self._make_one_all_fields(limit=limit, offset=offset) - self.assertIs(query._parent, mock.sentinel.parent) - self.assertIs(query._projection, mock.sentinel.projection) - self.assertIs(query._field_filters, mock.sentinel.filters) - self.assertEqual(query._orders, mock.sentinel.orders) - self.assertEqual(query._limit, limit) - self.assertEqual(query._offset, offset) - self.assertIs(query._start_at, mock.sentinel.start_at) - self.assertIs(query._end_at, mock.sentinel.end_at) - - def test__client_property(self): - parent = mock.Mock(_client=mock.sentinel.client, spec=["_client"]) - query = self._make_one(parent) - self.assertIs(query._client, mock.sentinel.client) - - def test___eq___other_type(self): - client = self._make_one_all_fields() - other = object() - self.assertFalse(client == other) - - def test___eq___different_parent(self): - parent = mock.sentinel.parent - other_parent = mock.sentinel.other_parent - client = self._make_one_all_fields(parent=parent) - other = self._make_one_all_fields(parent=other_parent) - self.assertFalse(client == other) - - def test___eq___different_projection(self): - parent = mock.sentinel.parent - client = self._make_one_all_fields(parent=parent, skip_fields=("projection",)) - client._projection = mock.sentinel.projection - other = self._make_one_all_fields(parent=parent, skip_fields=("projection",)) - other._projection = mock.sentinel.other_projection - self.assertFalse(client == other) - - def test___eq___different_field_filters(self): - parent = mock.sentinel.parent - client = self._make_one_all_fields( - parent=parent, skip_fields=("field_filters",) - ) - client._field_filters = mock.sentinel.field_filters - other = self._make_one_all_fields(parent=parent, 
skip_fields=("field_filters",)) - other._field_filters = mock.sentinel.other_field_filters - self.assertFalse(client == other) - - def test___eq___different_orders(self): - parent = mock.sentinel.parent - client = self._make_one_all_fields(parent=parent, skip_fields=("orders",)) - client._orders = mock.sentinel.orders - other = self._make_one_all_fields(parent=parent, skip_fields=("orders",)) - other._orders = mock.sentinel.other_orders - self.assertFalse(client == other) - - def test___eq___different_limit(self): - parent = mock.sentinel.parent - client = self._make_one_all_fields(parent=parent, limit=10) - other = self._make_one_all_fields(parent=parent, limit=20) - self.assertFalse(client == other) - - def test___eq___different_offset(self): - parent = mock.sentinel.parent - client = self._make_one_all_fields(parent=parent, offset=10) - other = self._make_one_all_fields(parent=parent, offset=20) - self.assertFalse(client == other) - - def test___eq___different_start_at(self): - parent = mock.sentinel.parent - client = self._make_one_all_fields(parent=parent, skip_fields=("start_at",)) - client._start_at = mock.sentinel.start_at - other = self._make_one_all_fields(parent=parent, skip_fields=("start_at",)) - other._start_at = mock.sentinel.other_start_at - self.assertFalse(client == other) - - def test___eq___different_end_at(self): - parent = mock.sentinel.parent - client = self._make_one_all_fields(parent=parent, skip_fields=("end_at",)) - client._end_at = mock.sentinel.end_at - other = self._make_one_all_fields(parent=parent, skip_fields=("end_at",)) - other._end_at = mock.sentinel.other_end_at - self.assertFalse(client == other) - - def test___eq___hit(self): - client = self._make_one_all_fields() - other = self._make_one_all_fields() - self.assertTrue(client == other) - - def _compare_queries(self, query1, query2, attr_name): - attrs1 = query1.__dict__.copy() - attrs2 = query2.__dict__.copy() - - attrs1.pop(attr_name) - attrs2.pop(attr_name) - - # The only different should be in ``attr_name``. - self.assertEqual(len(attrs1), len(attrs2)) - for key, value in attrs1.items(): - self.assertIs(value, attrs2[key]) - - @staticmethod - def _make_projection_for_select(field_paths): - from google.cloud.firestore_v1beta1.types import query - - return query.StructuredQuery.Projection( - fields=[ - query.StructuredQuery.FieldReference(field_path=field_path) - for field_path in field_paths - ] - ) - - def test_select_invalid_path(self): - query = self._make_one(mock.sentinel.parent) - - with self.assertRaises(ValueError): - query.select(["*"]) - - def test_select(self): - query1 = self._make_one_all_fields() - - field_paths2 = ["foo", "bar"] - query2 = query1.select(field_paths2) - self.assertIsNot(query2, query1) - self.assertIsInstance(query2, self._get_target_class()) - self.assertEqual( - query2._projection, self._make_projection_for_select(field_paths2) - ) - self._compare_queries(query1, query2, "_projection") - - # Make sure it overrides. 
- field_paths3 = ["foo.baz"] - query3 = query2.select(field_paths3) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, self._get_target_class()) - self.assertEqual( - query3._projection, self._make_projection_for_select(field_paths3) - ) - self._compare_queries(query2, query3, "_projection") - - def test_where_invalid_path(self): - query = self._make_one(mock.sentinel.parent) - - with self.assertRaises(ValueError): - query.where("*", "==", 1) - - def test_where(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - query_inst = self._make_one_all_fields(skip_fields=("field_filters",)) - new_query = query_inst.where("power.level", ">", 9000) - - self.assertIsNot(query_inst, new_query) - self.assertIsInstance(new_query, self._get_target_class()) - self.assertEqual(len(new_query._field_filters), 1) - - field_pb = new_query._field_filters[0] - expected_pb = query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference(field_path="power.level"), - op=StructuredQuery.FieldFilter.Operator.GREATER_THAN, - value=document.Value(integer_value=9000), - ) - self.assertEqual(field_pb, expected_pb) - self._compare_queries(query_inst, new_query, "_field_filters") - - def _where_unary_helper(self, value, op_enum, op_string="=="): - from google.cloud.firestore_v1beta1.types import query - - query_inst = self._make_one_all_fields(skip_fields=("field_filters",)) - field_path = "feeeld" - new_query = query_inst.where(field_path, op_string, value) - - self.assertIsNot(query_inst, new_query) - self.assertIsInstance(new_query, self._get_target_class()) - self.assertEqual(len(new_query._field_filters), 1) - - field_pb = new_query._field_filters[0] - expected_pb = query.StructuredQuery.UnaryFilter( - field=query.StructuredQuery.FieldReference(field_path=field_path), - op=op_enum, - ) - self.assertEqual(field_pb, expected_pb) - self._compare_queries(query_inst, new_query, "_field_filters") - - def test_where_eq_null(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - op_enum = StructuredQuery.UnaryFilter.Operator.IS_NULL - self._where_unary_helper(None, op_enum) - - def test_where_gt_null(self): - with self.assertRaises(ValueError): - self._where_unary_helper(None, 0, op_string=">") - - def test_where_eq_nan(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - op_enum = StructuredQuery.UnaryFilter.Operator.IS_NAN - self._where_unary_helper(float("nan"), op_enum) - - def test_where_le_nan(self): - with self.assertRaises(ValueError): - self._where_unary_helper(float("nan"), 0, op_string="<=") - - def test_where_w_delete(self): - from google.cloud.firestore_v1beta1 import DELETE_FIELD - - with self.assertRaises(ValueError): - self._where_unary_helper(DELETE_FIELD, 0) - - def test_where_w_server_timestamp(self): - from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP - - with self.assertRaises(ValueError): - self._where_unary_helper(SERVER_TIMESTAMP, 0) - - def test_where_w_array_remove(self): - from google.cloud.firestore_v1beta1 import ArrayRemove - - with self.assertRaises(ValueError): - self._where_unary_helper(ArrayRemove([1, 3, 5]), 0) - - def test_where_w_array_union(self): - from google.cloud.firestore_v1beta1 import ArrayUnion - - with self.assertRaises(ValueError): - self._where_unary_helper(ArrayUnion([2, 4, 8]), 0) - - def test_order_by_invalid_path(self): - query = 
self._make_one(mock.sentinel.parent) - - with self.assertRaises(ValueError): - query.order_by("*") - - def test_order_by(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - klass = self._get_target_class() - query1 = self._make_one_all_fields(skip_fields=("orders",)) - - field_path2 = "a" - query2 = query1.order_by(field_path2) - self.assertIsNot(query2, query1) - self.assertIsInstance(query2, klass) - order = _make_order_pb(field_path2, StructuredQuery.Direction.ASCENDING) - self.assertEqual(query2._orders, (order,)) - self._compare_queries(query1, query2, "_orders") - - # Make sure it appends to the orders. - field_path3 = "b" - query3 = query2.order_by(field_path3, direction=klass.DESCENDING) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, klass) - order_pb3 = _make_order_pb(field_path3, StructuredQuery.Direction.DESCENDING) - self.assertEqual(query3._orders, (order, order_pb3)) - self._compare_queries(query2, query3, "_orders") - - def test_limit(self): - query1 = self._make_one_all_fields() - - limit2 = 100 - query2 = query1.limit(limit2) - self.assertIsNot(query2, query1) - self.assertIsInstance(query2, self._get_target_class()) - self.assertEqual(query2._limit, limit2) - self._compare_queries(query1, query2, "_limit") - - # Make sure it overrides. - limit3 = 10 - query3 = query2.limit(limit3) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, self._get_target_class()) - self.assertEqual(query3._limit, limit3) - self._compare_queries(query2, query3, "_limit") - - def test_offset(self): - query1 = self._make_one_all_fields() - - offset2 = 23 - query2 = query1.offset(offset2) - self.assertIsNot(query2, query1) - self.assertIsInstance(query2, self._get_target_class()) - self.assertEqual(query2._offset, offset2) - self._compare_queries(query1, query2, "_offset") - - # Make sure it overrides. 
- offset3 = 35 - query3 = query2.offset(offset3) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, self._get_target_class()) - self.assertEqual(query3._offset, offset3) - self._compare_queries(query2, query3, "_offset") - - @staticmethod - def _make_collection(*path, **kw): - from google.cloud.firestore_v1beta1 import collection - - return collection.CollectionReference(*path, **kw) - - @staticmethod - def _make_docref(*path, **kw): - from google.cloud.firestore_v1beta1 import document - - return document.DocumentReference(*path, **kw) - - @staticmethod - def _make_snapshot(docref, values): - from google.cloud.firestore_v1beta1 import document - - return document.DocumentSnapshot(docref, values, True, None, None, None) - - def test__cursor_helper_w_dict(self): - values = {"a": 7, "b": "foo"} - query1 = self._make_one(mock.sentinel.parent) - query2 = query1._cursor_helper(values, True, True) - - self.assertIs(query2._parent, mock.sentinel.parent) - self.assertIsNone(query2._projection) - self.assertEqual(query2._field_filters, ()) - self.assertEqual(query2._orders, query1._orders) - self.assertIsNone(query2._limit) - self.assertIsNone(query2._offset) - self.assertIsNone(query2._end_at) - - cursor, before = query2._start_at - - self.assertEqual(cursor, values) - self.assertTrue(before) - - def test__cursor_helper_w_tuple(self): - values = (7, "foo") - query1 = self._make_one(mock.sentinel.parent) - query2 = query1._cursor_helper(values, False, True) - - self.assertIs(query2._parent, mock.sentinel.parent) - self.assertIsNone(query2._projection) - self.assertEqual(query2._field_filters, ()) - self.assertEqual(query2._orders, query1._orders) - self.assertIsNone(query2._limit) - self.assertIsNone(query2._offset) - self.assertIsNone(query2._end_at) - - cursor, before = query2._start_at - - self.assertEqual(cursor, list(values)) - self.assertFalse(before) - - def test__cursor_helper_w_list(self): - values = [7, "foo"] - query1 = self._make_one(mock.sentinel.parent) - query2 = query1._cursor_helper(values, True, False) - - self.assertIs(query2._parent, mock.sentinel.parent) - self.assertIsNone(query2._projection) - self.assertEqual(query2._field_filters, ()) - self.assertEqual(query2._orders, query1._orders) - self.assertIsNone(query2._limit) - self.assertIsNone(query2._offset) - self.assertIsNone(query2._start_at) - - cursor, before = query2._end_at - - self.assertEqual(cursor, values) - self.assertIsNot(cursor, values) - self.assertTrue(before) - - def test__cursor_helper_w_snapshot_wrong_collection(self): - values = {"a": 7, "b": "foo"} - docref = self._make_docref("there", "doc_id") - snapshot = self._make_snapshot(docref, values) - collection = self._make_collection("here") - query = self._make_one(collection) - - with self.assertRaises(ValueError): - query._cursor_helper(snapshot, False, False) - - def test__cursor_helper_w_snapshot(self): - values = {"a": 7, "b": "foo"} - docref = self._make_docref("here", "doc_id") - snapshot = self._make_snapshot(docref, values) - collection = self._make_collection("here") - query1 = self._make_one(collection) - - query2 = query1._cursor_helper(snapshot, False, False) - - self.assertIs(query2._parent, collection) - self.assertIsNone(query2._projection) - self.assertEqual(query2._field_filters, ()) - self.assertEqual(query2._orders, ()) - self.assertIsNone(query2._limit) - self.assertIsNone(query2._offset) - self.assertIsNone(query2._start_at) - - cursor, before = query2._end_at - - self.assertIs(cursor, snapshot) - self.assertFalse(before) 
- - def test_start_at(self): - collection = self._make_collection("here") - query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) - query2 = query1.order_by("hi") - - document_fields3 = {"hi": "mom"} - query3 = query2.start_at(document_fields3) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, self._get_target_class()) - self.assertEqual(query3._start_at, (document_fields3, True)) - self._compare_queries(query2, query3, "_start_at") - - # Make sure it overrides. - query4 = query3.order_by("bye") - values5 = {"hi": "zap", "bye": 88} - docref = self._make_docref("here", "doc_id") - document_fields5 = self._make_snapshot(docref, values5) - query5 = query4.start_at(document_fields5) - self.assertIsNot(query5, query4) - self.assertIsInstance(query5, self._get_target_class()) - self.assertEqual(query5._start_at, (document_fields5, True)) - self._compare_queries(query4, query5, "_start_at") - - def test_start_after(self): - collection = self._make_collection("here") - query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) - query2 = query1.order_by("down") - - document_fields3 = {"down": 99.75} - query3 = query2.start_after(document_fields3) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, self._get_target_class()) - self.assertEqual(query3._start_at, (document_fields3, False)) - self._compare_queries(query2, query3, "_start_at") - - # Make sure it overrides. - query4 = query3.order_by("out") - values5 = {"down": 100.25, "out": b"\x00\x01"} - docref = self._make_docref("here", "doc_id") - document_fields5 = self._make_snapshot(docref, values5) - query5 = query4.start_after(document_fields5) - self.assertIsNot(query5, query4) - self.assertIsInstance(query5, self._get_target_class()) - self.assertEqual(query5._start_at, (document_fields5, False)) - self._compare_queries(query4, query5, "_start_at") - - def test_end_before(self): - collection = self._make_collection("here") - query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) - query2 = query1.order_by("down") - - document_fields3 = {"down": 99.75} - query3 = query2.end_before(document_fields3) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, self._get_target_class()) - self.assertEqual(query3._end_at, (document_fields3, True)) - self._compare_queries(query2, query3, "_end_at") - - # Make sure it overrides. - query4 = query3.order_by("out") - values5 = {"down": 100.25, "out": b"\x00\x01"} - docref = self._make_docref("here", "doc_id") - document_fields5 = self._make_snapshot(docref, values5) - query5 = query4.end_before(document_fields5) - self.assertIsNot(query5, query4) - self.assertIsInstance(query5, self._get_target_class()) - self.assertEqual(query5._end_at, (document_fields5, True)) - self._compare_queries(query4, query5, "_end_at") - self._compare_queries(query4, query5, "_end_at") - - def test_end_at(self): - collection = self._make_collection("here") - query1 = self._make_one_all_fields(parent=collection, skip_fields=("orders",)) - query2 = query1.order_by("hi") - - document_fields3 = {"hi": "mom"} - query3 = query2.end_at(document_fields3) - self.assertIsNot(query3, query2) - self.assertIsInstance(query3, self._get_target_class()) - self.assertEqual(query3._end_at, (document_fields3, False)) - self._compare_queries(query2, query3, "_end_at") - - # Make sure it overrides. 
- query4 = query3.order_by("bye") - values5 = {"hi": "zap", "bye": 88} - docref = self._make_docref("here", "doc_id") - document_fields5 = self._make_snapshot(docref, values5) - query5 = query4.end_at(document_fields5) - self.assertIsNot(query5, query4) - self.assertIsInstance(query5, self._get_target_class()) - self.assertEqual(query5._end_at, (document_fields5, False)) - self._compare_queries(query4, query5, "_end_at") - - def test__filters_pb_empty(self): - query = self._make_one(mock.sentinel.parent) - self.assertEqual(len(query._field_filters), 0) - self.assertIsNone(query._filters_pb()) - - def test__filters_pb_single(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - query1 = self._make_one(mock.sentinel.parent) - query2 = query1.where("x.y", ">", 50.5) - filter_pb = query2._filters_pb() - expected_pb = query.StructuredQuery.Filter( - field_filter=query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference(field_path="x.y"), - op=StructuredQuery.FieldFilter.Operator.GREATER_THAN, - value=document.Value(double_value=50.5), - ) - ) - self.assertEqual(filter_pb, expected_pb) - - def test__filters_pb_multi(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - query1 = self._make_one(mock.sentinel.parent) - query2 = query1.where("x.y", ">", 50.5) - query3 = query2.where("ABC", "==", 123) - - filter_pb = query3._filters_pb() - op_class = StructuredQuery.FieldFilter.Operator - expected_pb = query.StructuredQuery.Filter( - composite_filter=query.StructuredQuery.CompositeFilter( - op=StructuredQuery.CompositeFilter.Operator.AND, - filters=[ - query.StructuredQuery.Filter( - field_filter=query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference( - field_path="x.y" - ), - op=op_class.GREATER_THAN, - value=document.Value(double_value=50.5), - ) - ), - query.StructuredQuery.Filter( - field_filter=query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference( - field_path="ABC" - ), - op=op_class.EQUAL, - value=document.Value(integer_value=123), - ) - ), - ], - ) - ) - self.assertEqual(filter_pb, expected_pb) - - def test__normalize_projection_none(self): - query = self._make_one(mock.sentinel.parent) - self.assertIsNone(query._normalize_projection(None)) - - def test__normalize_projection_empty(self): - projection = self._make_projection_for_select([]) - query = self._make_one(mock.sentinel.parent) - normalized = query._normalize_projection(projection) - field_paths = [field_ref.field_path for field_ref in normalized.fields] - self.assertEqual(field_paths, ["__name__"]) - - def test__normalize_projection_non_empty(self): - projection = self._make_projection_for_select(["a", "b"]) - query = self._make_one(mock.sentinel.parent) - self.assertIs(query._normalize_projection(projection), projection) - - def test__normalize_orders_wo_orders_wo_cursors(self): - query = self._make_one(mock.sentinel.parent) - expected = [] - self.assertEqual(query._normalize_orders(), expected) - - def test__normalize_orders_w_orders_wo_cursors(self): - query = self._make_one(mock.sentinel.parent).order_by("a") - expected = [query._make_order("a", "ASCENDING")] - self.assertEqual(query._normalize_orders(), expected) - - def 
test__normalize_orders_wo_orders_w_snapshot_cursor(self): - values = {"a": 7, "b": "foo"} - docref = self._make_docref("here", "doc_id") - snapshot = self._make_snapshot(docref, values) - collection = self._make_collection("here") - query = self._make_one(collection).start_at(snapshot) - expected = [query._make_order("__name__", "ASCENDING")] - self.assertEqual(query._normalize_orders(), expected) - - def test__normalize_orders_w_name_orders_w_snapshot_cursor(self): - values = {"a": 7, "b": "foo"} - docref = self._make_docref("here", "doc_id") - snapshot = self._make_snapshot(docref, values) - collection = self._make_collection("here") - query = ( - self._make_one(collection) - .order_by("__name__", "DESCENDING") - .start_at(snapshot) - ) - expected = [query._make_order("__name__", "DESCENDING")] - self.assertEqual(query._normalize_orders(), expected) - - def test__normalize_orders_wo_orders_w_snapshot_cursor_w_neq_exists(self): - values = {"a": 7, "b": "foo"} - docref = self._make_docref("here", "doc_id") - snapshot = self._make_snapshot(docref, values) - collection = self._make_collection("here") - query = ( - self._make_one(collection) - .where("c", "<=", 20) - .order_by("c", "DESCENDING") - .start_at(snapshot) - ) - expected = [ - query._make_order("c", "DESCENDING"), - query._make_order("__name__", "DESCENDING"), - ] - self.assertEqual(query._normalize_orders(), expected) - - def test__normalize_orders_wo_orders_w_snapshot_cursor_w_neq_where(self): - values = {"a": 7, "b": "foo"} - docref = self._make_docref("here", "doc_id") - snapshot = self._make_snapshot(docref, values) - collection = self._make_collection("here") - query = self._make_one(collection).where("c", "<=", 20).end_at(snapshot) - expected = [ - query._make_order("c", "ASCENDING"), - query._make_order("__name__", "ASCENDING"), - ] - self.assertEqual(query._normalize_orders(), expected) - - def test__normalize_cursor_none(self): - query = self._make_one(mock.sentinel.parent) - self.assertIsNone(query._normalize_cursor(None, query._orders)) - - def test__normalize_cursor_no_order(self): - cursor = ([1], True) - query = self._make_one(mock.sentinel.parent) - - with self.assertRaises(ValueError): - query._normalize_cursor(cursor, query._orders) - - def test__normalize_cursor_as_list_mismatched_order(self): - cursor = ([1, 2], True) - query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - with self.assertRaises(ValueError): - query._normalize_cursor(cursor, query._orders) - - def test__normalize_cursor_as_dict_mismatched_order(self): - cursor = ({"a": 1}, True) - query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - with self.assertRaises(ValueError): - query._normalize_cursor(cursor, query._orders) - - def test__normalize_cursor_w_delete(self): - from google.cloud.firestore_v1beta1 import DELETE_FIELD - - cursor = ([DELETE_FIELD], True) - query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - with self.assertRaises(ValueError): - query._normalize_cursor(cursor, query._orders) - - def test__normalize_cursor_w_server_timestamp(self): - from google.cloud.firestore_v1beta1 import SERVER_TIMESTAMP - - cursor = ([SERVER_TIMESTAMP], True) - query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - with self.assertRaises(ValueError): - query._normalize_cursor(cursor, query._orders) - - def test__normalize_cursor_w_array_remove(self): - from google.cloud.firestore_v1beta1 import ArrayRemove - - cursor = ([ArrayRemove([1, 3, 5])], True) - query = 
self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - with self.assertRaises(ValueError): - query._normalize_cursor(cursor, query._orders) - - def test__normalize_cursor_w_array_union(self): - from google.cloud.firestore_v1beta1 import ArrayUnion - - cursor = ([ArrayUnion([2, 4, 8])], True) - query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - with self.assertRaises(ValueError): - query._normalize_cursor(cursor, query._orders) - - def test__normalize_cursor_as_list_hit(self): - cursor = ([1], True) - query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) - - def test__normalize_cursor_as_dict_hit(self): - cursor = ({"b": 1}, True) - query = self._make_one(mock.sentinel.parent).order_by("b", "ASCENDING") - - self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) - - def test__normalize_cursor_as_snapshot_hit(self): - values = {"b": 1} - docref = self._make_docref("here", "doc_id") - snapshot = self._make_snapshot(docref, values) - cursor = (snapshot, True) - collection = self._make_collection("here") - query = self._make_one(collection).order_by("b", "ASCENDING") - - self.assertEqual(query._normalize_cursor(cursor, query._orders), ([1], True)) - - def test__normalize_cursor_w___name___w_reference(self): - db_string = "projects/my-project/database/(default)" - client = mock.Mock(spec=["_database_string"]) - client._database_string = db_string - parent = mock.Mock(spec=["_path", "_client"]) - parent._client = client - parent._path = ["C"] - query = self._make_one(parent).order_by("__name__", "ASCENDING") - docref = self._make_docref("here", "doc_id") - values = {"a": 7} - snapshot = self._make_snapshot(docref, values) - expected = docref - cursor = (snapshot, True) - - self.assertEqual( - query._normalize_cursor(cursor, query._orders), ([expected], True) - ) - - def test__normalize_cursor_w___name___wo_slash(self): - db_string = "projects/my-project/database/(default)" - client = mock.Mock(spec=["_database_string"]) - client._database_string = db_string - parent = mock.Mock(spec=["_path", "_client", "document"]) - parent._client = client - parent._path = ["C"] - document = parent.document.return_value = mock.Mock(spec=[]) - query = self._make_one(parent).order_by("__name__", "ASCENDING") - cursor = (["b"], True) - expected = document - - self.assertEqual( - query._normalize_cursor(cursor, query._orders), ([expected], True) - ) - parent.document.assert_called_once_with("b") - - def test__to_protobuf_all_fields(self): - from google.protobuf import wrappers_pb2 - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="cat", spec=["id"]) - query1 = self._make_one(parent) - query2 = query1.select(["X", "Y", "Z"]) - query3 = query2.where("Y", ">", 2.5) - query4 = query3.order_by("X") - query5 = query4.limit(17) - query6 = query5.offset(3) - query7 = query6.start_at({"X": 10}) - query8 = query7.end_at({"X": 25}) - - structured_query_pb = query8._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "select": query.StructuredQuery.Projection( - fields=[ - query.StructuredQuery.FieldReference(field_path=field_path) - for field_path in ["X", "Y", "Z"] - ] - ), - "where": query.StructuredQuery.Filter( - 
field_filter=query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference(field_path="Y"), - op=StructuredQuery.FieldFilter.Operator.GREATER_THAN, - value=document.Value(double_value=2.5), - ) - ), - "order_by": [_make_order_pb("X", StructuredQuery.Direction.ASCENDING)], - "start_at": query.Cursor( - values=[document.Value(integer_value=10)], before=True - ), - "end_at": query.Cursor(values=[document.Value(integer_value=25)]), - "offset": 3, - "limit": wrappers_pb2.Int32Value(value=17), - } - expected_pb = query.StructuredQuery(**query_kwargs) - self.assertEqual(structured_query_pb, expected_pb) - - def test__to_protobuf_select_only(self): - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="cat", spec=["id"]) - query1 = self._make_one(parent) - field_paths = ["a.b", "a.c", "d"] - query2 = query1.select(field_paths) - - structured_query_pb = query2._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "select": query.StructuredQuery.Projection( - fields=[ - query.StructuredQuery.FieldReference(field_path=field_path) - for field_path in field_paths - ] - ), - } - expected_pb = query.StructuredQuery(**query_kwargs) - self.assertEqual(structured_query_pb, expected_pb) - - def test__to_protobuf_where_only(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="dog", spec=["id"]) - query1 = self._make_one(parent) - query2 = query1.where("a", "==", u"b") - - structured_query_pb = query2._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "where": query.StructuredQuery.Filter( - field_filter=query.StructuredQuery.FieldFilter( - field=query.StructuredQuery.FieldReference(field_path="a"), - op=StructuredQuery.FieldFilter.Operator.EQUAL, - value=document.Value(string_value=u"b"), - ) - ), - } - expected_pb = query.StructuredQuery(**query_kwargs) - self.assertEqual(structured_query_pb, expected_pb) - - def test__to_protobuf_order_by_only(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="fish", spec=["id"]) - query1 = self._make_one(parent) - query2 = query1.order_by("abc") - - structured_query_pb = query2._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "order_by": [_make_order_pb("abc", StructuredQuery.Direction.ASCENDING)], - } - expected_pb = query.StructuredQuery(**query_kwargs) - self.assertEqual(structured_query_pb, expected_pb) - - def test__to_protobuf_start_at_only(self): - # NOTE: "only" is wrong since we must have ``order_by`` as well. 
- from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="phish", spec=["id"]) - query_inst = ( - self._make_one(parent).order_by("X.Y").start_after({"X": {"Y": u"Z"}}) - ) - - structured_query_pb = query_inst._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "order_by": [_make_order_pb("X.Y", StructuredQuery.Direction.ASCENDING)], - "start_at": query.Cursor(values=[document.Value(string_value=u"Z")]), - } - expected_pb = query.StructuredQuery(**query_kwargs) - self.assertEqual(structured_query_pb, expected_pb) - - def test__to_protobuf_end_at_only(self): - # NOTE: "only" is wrong since we must have ``order_by`` as well. - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="ghoti", spec=["id"]) - query_inst = self._make_one(parent).order_by("a").end_at({"a": 88}) - - structured_query_pb = query_inst._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "order_by": [_make_order_pb("a", StructuredQuery.Direction.ASCENDING)], - "end_at": query.Cursor(values=[document.Value(integer_value=88)]), - } - expected_pb = query.StructuredQuery(**query_kwargs) - self.assertEqual(structured_query_pb, expected_pb) - - def test__to_protobuf_offset_only(self): - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="cartt", spec=["id"]) - query1 = self._make_one(parent) - offset = 14 - query2 = query1.offset(offset) - - structured_query_pb = query2._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "offset": offset, - } - expected_pb = query.StructuredQuery(**query_kwargs) - self.assertEqual(structured_query_pb, expected_pb) - - def test__to_protobuf_limit_only(self): - from google.protobuf import wrappers_pb2 - from google.cloud.firestore_v1beta1.types import query - - parent = mock.Mock(id="donut", spec=["id"]) - query1 = self._make_one(parent) - limit = 31 - query2 = query1.limit(limit) - - structured_query_pb = query2._to_protobuf() - query_kwargs = { - "from_": [ - query.StructuredQuery.CollectionSelector(collection_id=parent.id) - ], - "limit": wrappers_pb2.Int32Value(value=limit), - } - expected_pb = query.StructuredQuery(**query_kwargs) - - self.assertEqual(structured_query_pb, expected_pb) - - def test_get_simple(self): - import warnings - - # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=["run_query"]) - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Make a **real** collection reference as parent. - parent = client.collection("dee") - - # Add a dummy response to the minimal fake GAPIC. - _, expected_prefix = parent._parent_info() - name = "{}/sleep".format(expected_prefix) - data = {"snooze": 10} - response_pb = _make_query_response(name=name, data=data) - firestore_api.run_query.return_value = iter([response_pb]) - - # Execute the query and check the response. 
- query = self._make_one(parent) - - with warnings.catch_warnings(record=True) as warned: - get_response = query.get() - - self.assertIsInstance(get_response, types.GeneratorType) - returned = list(get_response) - self.assertEqual(len(returned), 1) - snapshot = returned[0] - self.assertEqual(snapshot.reference._path, ("dee", "sleep")) - self.assertEqual(snapshot.to_dict(), data) - - # Verify the mock call. - parent_path, _ = parent._parent_info() - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - # Verify the deprecation - self.assertEqual(len(warned), 1) - self.assertIs(warned[0].category, DeprecationWarning) - - def test_stream_simple(self): - # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=["run_query"]) - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Make a **real** collection reference as parent. - parent = client.collection("dee") - - # Add a dummy response to the minimal fake GAPIC. - _, expected_prefix = parent._parent_info() - name = "{}/sleep".format(expected_prefix) - data = {"snooze": 10} - response_pb = _make_query_response(name=name, data=data) - firestore_api.run_query.return_value = iter([response_pb]) - - # Execute the query and check the response. - query = self._make_one(parent) - get_response = query.stream() - self.assertIsInstance(get_response, types.GeneratorType) - returned = list(get_response) - self.assertEqual(len(returned), 1) - snapshot = returned[0] - self.assertEqual(snapshot.reference._path, ("dee", "sleep")) - self.assertEqual(snapshot.to_dict(), data) - - # Verify the mock call. - parent_path, _ = parent._parent_info() - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_stream_with_transaction(self): - # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=["run_query"]) - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Create a real-ish transaction for this client. - transaction = client.transaction() - txn_id = b"\x00\x00\x01-work-\xf2" - transaction._id = txn_id - - # Make a **real** collection reference as parent. - parent = client.collection("declaration") - - # Add a dummy response to the minimal fake GAPIC. - parent_path, expected_prefix = parent._parent_info() - name = "{}/burger".format(expected_prefix) - data = {"lettuce": b"\xee\x87"} - response_pb = _make_query_response(name=name, data=data) - firestore_api.run_query.return_value = iter([response_pb]) - - # Execute the query and check the response. - query = self._make_one(parent) - get_response = query.stream(transaction=transaction) - self.assertIsInstance(get_response, types.GeneratorType) - returned = list(get_response) - self.assertEqual(len(returned), 1) - snapshot = returned[0] - self.assertEqual(snapshot.reference._path, ("declaration", "burger")) - self.assertEqual(snapshot.to_dict(), data) - - # Verify the mock call. - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - - def test_stream_no_results(self): - # Create a minimal fake GAPIC with a dummy response. 
- firestore_api = mock.Mock(spec=["run_query"]) - empty_response = _make_query_response() - run_query_response = iter([empty_response]) - firestore_api.run_query.return_value = run_query_response - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Make a **real** collection reference as parent. - parent = client.collection("dah", "dah", "dum") - query = self._make_one(parent) - - get_response = query.stream() - self.assertIsInstance(get_response, types.GeneratorType) - self.assertEqual(list(get_response), []) - - # Verify the mock call. - parent_path, _ = parent._parent_info() - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_stream_second_response_in_empty_stream(self): - # Create a minimal fake GAPIC with a dummy response. - firestore_api = mock.Mock(spec=["run_query"]) - empty_response1 = _make_query_response() - empty_response2 = _make_query_response() - run_query_response = iter([empty_response1, empty_response2]) - firestore_api.run_query.return_value = run_query_response - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Make a **real** collection reference as parent. - parent = client.collection("dah", "dah", "dum") - query = self._make_one(parent) - - get_response = query.stream() - self.assertIsInstance(get_response, types.GeneratorType) - self.assertEqual(list(get_response), []) - - # Verify the mock call. - parent_path, _ = parent._parent_info() - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_stream_with_skipped_results(self): - # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=["run_query"]) - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Make a **real** collection reference as parent. - parent = client.collection("talk", "and", "chew-gum") - - # Add two dummy responses to the minimal fake GAPIC. - _, expected_prefix = parent._parent_info() - response_pb1 = _make_query_response(skipped_results=1) - name = "{}/clock".format(expected_prefix) - data = {"noon": 12, "nested": {"bird": 10.5}} - response_pb2 = _make_query_response(name=name, data=data) - firestore_api.run_query.return_value = iter([response_pb1, response_pb2]) - - # Execute the query and check the response. - query = self._make_one(parent) - get_response = query.stream() - self.assertIsInstance(get_response, types.GeneratorType) - returned = list(get_response) - self.assertEqual(len(returned), 1) - snapshot = returned[0] - self.assertEqual(snapshot.reference._path, ("talk", "and", "chew-gum", "clock")) - self.assertEqual(snapshot.to_dict(), data) - - # Verify the mock call. - parent_path, _ = parent._parent_info() - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - def test_stream_empty_after_first_response(self): - # Create a minimal fake GAPIC. - firestore_api = mock.Mock(spec=["run_query"]) - - # Attach the fake GAPIC to a real client. 
- client = _make_client() - client._firestore_api_internal = firestore_api - - # Make a **real** collection reference as parent. - parent = client.collection("charles") - - # Add two dummy responses to the minimal fake GAPIC. - _, expected_prefix = parent._parent_info() - name = "{}/bark".format(expected_prefix) - data = {"lee": "hoop"} - response_pb1 = _make_query_response(name=name, data=data) - response_pb2 = _make_query_response() - firestore_api.run_query.return_value = iter([response_pb1, response_pb2]) - - # Execute the query and check the response. - query = self._make_one(parent) - get_response = query.stream() - self.assertIsInstance(get_response, types.GeneratorType) - returned = list(get_response) - self.assertEqual(len(returned), 1) - snapshot = returned[0] - self.assertEqual(snapshot.reference._path, ("charles", "bark")) - self.assertEqual(snapshot.to_dict(), data) - - # Verify the mock call. - parent_path, _ = parent._parent_info() - firestore_api.run_query.assert_called_once_with( - request={ - "parent": parent_path, - "structured_query": query._to_protobuf(), - "transaction": None, - }, - metadata=client._rpc_metadata, - ) - - @mock.patch("google.cloud.firestore_v1beta1.query.Watch", autospec=True) - def test_on_snapshot(self, watch): - query = self._make_one(mock.sentinel.parent) - query.on_snapshot(None) - watch.for_query.assert_called_once() - - def test_comparator_no_ordering(self): - query = self._make_one(mock.sentinel.parent) - query._orders = [] - doc1 = mock.Mock() - doc1.reference._path = ("col", "adocument1") - - doc2 = mock.Mock() - doc2.reference._path = ("col", "adocument2") - - sort = query._comparator(doc1, doc2) - self.assertEqual(sort, -1) - - def test_comparator_no_ordering_same_id(self): - query = self._make_one(mock.sentinel.parent) - query._orders = [] - doc1 = mock.Mock() - doc1.reference._path = ("col", "adocument1") - - doc2 = mock.Mock() - doc2.reference._path = ("col", "adocument1") - - sort = query._comparator(doc1, doc2) - self.assertEqual(sort, 0) - - def test_comparator_ordering(self): - query = self._make_one(mock.sentinel.parent) - orderByMock = mock.Mock() - orderByMock.field.field_path = "last" - orderByMock.direction = 1 # ascending - query._orders = [orderByMock] - - doc1 = mock.Mock() - doc1.reference._path = ("col", "adocument1") - doc1._data = { - "first": {"stringValue": "Ada"}, - "last": {"stringValue": "secondlovelace"}, - } - doc2 = mock.Mock() - doc2.reference._path = ("col", "adocument2") - doc2._data = { - "first": {"stringValue": "Ada"}, - "last": {"stringValue": "lovelace"}, - } - - sort = query._comparator(doc1, doc2) - self.assertEqual(sort, 1) - - def test_comparator_ordering_descending(self): - query = self._make_one(mock.sentinel.parent) - orderByMock = mock.Mock() - orderByMock.field.field_path = "last" - orderByMock.direction = -1 # descending - query._orders = [orderByMock] - - doc1 = mock.Mock() - doc1.reference._path = ("col", "adocument1") - doc1._data = { - "first": {"stringValue": "Ada"}, - "last": {"stringValue": "secondlovelace"}, - } - doc2 = mock.Mock() - doc2.reference._path = ("col", "adocument2") - doc2._data = { - "first": {"stringValue": "Ada"}, - "last": {"stringValue": "lovelace"}, - } - - sort = query._comparator(doc1, doc2) - self.assertEqual(sort, -1) - - def test_comparator_missing_order_by_field_in_data_raises(self): - query = self._make_one(mock.sentinel.parent) - orderByMock = mock.Mock() - orderByMock.field.field_path = "last" - orderByMock.direction = 1 # ascending - query._orders = 
[orderByMock] - - doc1 = mock.Mock() - doc1.reference._path = ("col", "adocument1") - doc1._data = {} - doc2 = mock.Mock() - doc2.reference._path = ("col", "adocument2") - doc2._data = { - "first": {"stringValue": "Ada"}, - "last": {"stringValue": "lovelace"}, - } - - with self.assertRaisesRegex(ValueError, "Can only compare fields "): - query._comparator(doc1, doc2) - - -class Test__enum_from_op_string(unittest.TestCase): - @staticmethod - def _call_fut(op_string): - from google.cloud.firestore_v1beta1.query import _enum_from_op_string - - return _enum_from_op_string(op_string) - - def test_success(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - op_class = StructuredQuery.FieldFilter.Operator - self.assertEqual(self._call_fut("<"), op_class.LESS_THAN) - self.assertEqual(self._call_fut("<="), op_class.LESS_THAN_OR_EQUAL) - self.assertEqual(self._call_fut("=="), op_class.EQUAL) - self.assertEqual(self._call_fut(">="), op_class.GREATER_THAN_OR_EQUAL) - self.assertEqual(self._call_fut(">"), op_class.GREATER_THAN) - self.assertEqual(self._call_fut("array_contains"), op_class.ARRAY_CONTAINS) - - def test_failure(self): - with self.assertRaises(ValueError): - self._call_fut("?") - - -class Test__isnan(unittest.TestCase): - @staticmethod - def _call_fut(value): - from google.cloud.firestore_v1beta1.query import _isnan - - return _isnan(value) - - def test_valid(self): - self.assertTrue(self._call_fut(float("nan"))) - - def test_invalid(self): - self.assertFalse(self._call_fut(51.5)) - self.assertFalse(self._call_fut(None)) - self.assertFalse(self._call_fut("str")) - self.assertFalse(self._call_fut(int)) - self.assertFalse(self._call_fut(1.0 + 1.0j)) - - -class Test__enum_from_direction(unittest.TestCase): - @staticmethod - def _call_fut(direction): - from google.cloud.firestore_v1beta1.query import _enum_from_direction - - return _enum_from_direction(direction) - - def test_success(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.query import Query - - dir_class = StructuredQuery.Direction - self.assertEqual(self._call_fut(Query.ASCENDING), dir_class.ASCENDING) - self.assertEqual(self._call_fut(Query.DESCENDING), dir_class.DESCENDING) - - # Ints pass through - self.assertEqual(self._call_fut(dir_class.ASCENDING), dir_class.ASCENDING) - self.assertEqual(self._call_fut(dir_class.DESCENDING), dir_class.DESCENDING) - - def test_failure(self): - with self.assertRaises(ValueError): - self._call_fut("neither-ASCENDING-nor-DESCENDING") - - -class Test__filter_pb(unittest.TestCase): - @staticmethod - def _call_fut(field_or_unary): - from google.cloud.firestore_v1beta1.query import _filter_pb - - return _filter_pb(field_or_unary) - - def test_unary(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import query - - unary_pb = query.StructuredQuery.UnaryFilter( - field=query.StructuredQuery.FieldReference(field_path="a.b.c"), - op=StructuredQuery.UnaryFilter.Operator.IS_NULL, - ) - filter_pb = self._call_fut(unary_pb) - expected_pb = query.StructuredQuery.Filter(unary_filter=unary_pb) - self.assertEqual(filter_pb, expected_pb) - - def test_field(self): - from google.cloud.firestore_v1beta1.types import StructuredQuery - - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import query - - field_filter_pb = query.StructuredQuery.FieldFilter( - 
field=query.StructuredQuery.FieldReference(field_path="XYZ"), - op=StructuredQuery.FieldFilter.Operator.GREATER_THAN, - value=document.Value(double_value=90.75), - ) - filter_pb = self._call_fut(field_filter_pb) - expected_pb = query.StructuredQuery.Filter(field_filter=field_filter_pb) - self.assertEqual(filter_pb, expected_pb) - - def test_bad_type(self): - with self.assertRaises(ValueError): - self._call_fut(None) - - -class Test__cursor_pb(unittest.TestCase): - @staticmethod - def _call_fut(cursor_pair): - from google.cloud.firestore_v1beta1.query import _cursor_pb - - return _cursor_pb(cursor_pair) - - def test_no_pair(self): - self.assertIsNone(self._call_fut(None)) - - def test_success(self): - from google.cloud.firestore_v1beta1.types import query - from google.cloud.firestore_v1beta1 import _helpers - - data = [1.5, 10, True] - cursor_pair = data, True - - cursor_pb = self._call_fut(cursor_pair) - - expected_pb = query.Cursor( - values=[_helpers.encode_value(value) for value in data], before=True - ) - self.assertEqual(cursor_pb, expected_pb) - - -class Test__query_response_to_snapshot(unittest.TestCase): - @staticmethod - def _call_fut(response_pb, collection, expected_prefix): - from google.cloud.firestore_v1beta1.query import _query_response_to_snapshot - - return _query_response_to_snapshot(response_pb, collection, expected_prefix) - - def test_empty(self): - response_pb = _make_query_response() - snapshot = self._call_fut(response_pb, None, None) - self.assertIsNone(snapshot) - - def test_after_offset(self): - skipped_results = 410 - response_pb = _make_query_response(skipped_results=skipped_results) - snapshot = self._call_fut(response_pb, None, None) - self.assertIsNone(snapshot) - - def test_response(self): - from google.cloud.firestore_v1beta1.document import DocumentSnapshot - - client = _make_client() - collection = client.collection("a", "b", "c") - _, expected_prefix = collection._parent_info() - - # Create name for the protobuf. 
- doc_id = "gigantic" - name = "{}/{}".format(expected_prefix, doc_id) - data = {"a": 901, "b": True} - response_pb = _make_query_response(name=name, data=data) - - snapshot = self._call_fut(response_pb, collection, expected_prefix) - self.assertIsInstance(snapshot, DocumentSnapshot) - expected_path = collection._path + (doc_id,) - self.assertEqual(snapshot.reference._path, expected_path) - self.assertEqual(snapshot.to_dict(), data) - self.assertTrue(snapshot.exists) - self.assertEqual(snapshot.read_time, response_pb._pb.read_time) - self.assertEqual(snapshot.create_time, response_pb._pb.document.create_time) - self.assertEqual(snapshot.update_time, response_pb._pb.document.update_time) - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_client(project="project-project"): - from google.cloud.firestore_v1beta1.client import Client - - credentials = _make_credentials() - - with pytest.deprecated_call(): - return Client(project=project, credentials=credentials) - - -def _make_order_pb(field_path, direction): - from google.cloud.firestore_v1beta1.types import query - - return query.StructuredQuery.Order( - field=query.StructuredQuery.FieldReference(field_path=field_path), - direction=direction, - ) - - -def _make_query_response(**kwargs): - # kwargs supported are ``skipped_results``, ``name`` and ``data`` - from google.cloud.firestore_v1beta1.types import document - from google.cloud.firestore_v1beta1.types import firestore - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud.firestore_v1beta1 import _helpers - - now = datetime.datetime.utcnow() - read_time = _datetime_to_pb_timestamp(now) - kwargs["read_time"] = read_time - - name = kwargs.pop("name", None) - data = kwargs.pop("data", None) - if name is not None and data is not None: - document_pb = document.Document(name=name, fields=_helpers.encode_dict(data)) - delta = datetime.timedelta(seconds=100) - update_time = _datetime_to_pb_timestamp(now - delta) - create_time = _datetime_to_pb_timestamp(now - 2 * delta) - document_pb._pb.update_time.CopyFrom(update_time) - document_pb._pb.create_time.CopyFrom(create_time) - - kwargs["document"] = document_pb - - return firestore.RunQueryResponse(**kwargs) diff --git a/tests/unit/v1beta1/test_transaction.py b/tests/unit/v1beta1/test_transaction.py deleted file mode 100644 index 1a46cca77..000000000 --- a/tests/unit/v1beta1/test_transaction.py +++ /dev/null @@ -1,1047 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
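(The v1beta1 query tests deleted above exercise cursor and ordering behaviour that the surviving firestore_v1 surface still provides. Purely for orientation, and not as part of this patch, here is a minimal sketch of an equivalent ordered cursor query against the public v1 client; the collection and field names are placeholders.)

from google.cloud import firestore

# Hypothetical client/collection/field names, used only to illustrate the
# shape of the surviving v1 query API.
client = firestore.Client()

query = (
    client.collection("users")
    .where("born", ">=", 1815)
    .order_by("born")                 # dict-valued cursors must line up with an order_by field
    .start_at({"born": 1815})         # inclusive lower bound
    .end_before({"born": 1900})       # exclusive upper bound
)

for snapshot in query.stream():       # stream() yields DocumentSnapshot objects
    print(snapshot.id, snapshot.to_dict())

As in the removed tests, each builder call returns a new Query instance rather than mutating the original, and a dict cursor that does not match the explicit ordering raises ValueError.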
- -import unittest - -import mock -import pytest - - -class TestTransaction(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.transaction import Transaction - - return Transaction - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor_defaults(self): - from google.cloud.firestore_v1beta1.transaction import MAX_ATTEMPTS - - transaction = self._make_one(mock.sentinel.client) - self.assertIs(transaction._client, mock.sentinel.client) - self.assertEqual(transaction._write_pbs, []) - self.assertEqual(transaction._max_attempts, MAX_ATTEMPTS) - self.assertFalse(transaction._read_only) - self.assertIsNone(transaction._id) - - def test_constructor_explicit(self): - transaction = self._make_one( - mock.sentinel.client, max_attempts=10, read_only=True - ) - self.assertIs(transaction._client, mock.sentinel.client) - self.assertEqual(transaction._write_pbs, []) - self.assertEqual(transaction._max_attempts, 10) - self.assertTrue(transaction._read_only) - self.assertIsNone(transaction._id) - - def test__add_write_pbs_failure(self): - from google.cloud.firestore_v1beta1.transaction import _WRITE_READ_ONLY - - batch = self._make_one(mock.sentinel.client, read_only=True) - self.assertEqual(batch._write_pbs, []) - with self.assertRaises(ValueError) as exc_info: - batch._add_write_pbs([mock.sentinel.write]) - - self.assertEqual(exc_info.exception.args, (_WRITE_READ_ONLY,)) - self.assertEqual(batch._write_pbs, []) - - def test__add_write_pbs(self): - batch = self._make_one(mock.sentinel.client) - self.assertEqual(batch._write_pbs, []) - batch._add_write_pbs([mock.sentinel.write]) - self.assertEqual(batch._write_pbs, [mock.sentinel.write]) - - def test__options_protobuf_read_only(self): - from google.cloud.firestore_v1beta1.types import common - - transaction = self._make_one(mock.sentinel.client, read_only=True) - options_pb = transaction._options_protobuf(None) - expected_pb = common.TransactionOptions( - read_only=common.TransactionOptions.ReadOnly() - ) - self.assertEqual(options_pb, expected_pb) - - def test__options_protobuf_read_only_retry(self): - from google.cloud.firestore_v1beta1.transaction import _CANT_RETRY_READ_ONLY - - transaction = self._make_one(mock.sentinel.client, read_only=True) - retry_id = b"illuminate" - - with self.assertRaises(ValueError) as exc_info: - transaction._options_protobuf(retry_id) - - self.assertEqual(exc_info.exception.args, (_CANT_RETRY_READ_ONLY,)) - - def test__options_protobuf_read_write(self): - transaction = self._make_one(mock.sentinel.client) - options_pb = transaction._options_protobuf(None) - self.assertIsNone(options_pb) - - def test__options_protobuf_on_retry(self): - from google.cloud.firestore_v1beta1.types import common - - transaction = self._make_one(mock.sentinel.client) - retry_id = b"hocus-pocus" - options_pb = transaction._options_protobuf(retry_id) - expected_pb = common.TransactionOptions( - read_write=common.TransactionOptions.ReadWrite(retry_transaction=retry_id) - ) - self.assertEqual(options_pb, expected_pb) - - def test_in_progress_property(self): - transaction = self._make_one(mock.sentinel.client) - self.assertFalse(transaction.in_progress) - transaction._id = b"not-none-bites" - self.assertTrue(transaction.in_progress) - - def test_id_property(self): - transaction = self._make_one(mock.sentinel.client) - transaction._id = mock.sentinel.eye_dee - self.assertIs(transaction.id, mock.sentinel.eye_dee) - - def 
test__begin(self): - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - from google.cloud.firestore_v1beta1.types import firestore - - # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - txn_id = b"to-begin" - response = firestore.BeginTransactionResponse(transaction=txn_id) - firestore_api.begin_transaction.return_value = response - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Actually make a transaction and ``begin()`` it. - transaction = self._make_one(client) - self.assertIsNone(transaction._id) - - ret_val = transaction._begin() - self.assertIsNone(ret_val) - self.assertEqual(transaction._id, txn_id) - - # Verify the called mock. - firestore_api.begin_transaction.assert_called_once_with( - request={"database": client._database_string, "options": None}, - metadata=client._rpc_metadata, - ) - - def test__begin_failure(self): - from google.cloud.firestore_v1beta1.transaction import _CANT_BEGIN - - client = _make_client() - transaction = self._make_one(client) - transaction._id = b"not-none" - - with self.assertRaises(ValueError) as exc_info: - transaction._begin() - - err_msg = _CANT_BEGIN.format(transaction._id) - self.assertEqual(exc_info.exception.args, (err_msg,)) - - def test__clean_up(self): - transaction = self._make_one(mock.sentinel.client) - transaction._write_pbs.extend([mock.sentinel.write_pb1, mock.sentinel.write]) - transaction._id = b"not-this-time-my-friend" - - ret_val = transaction._clean_up() - self.assertIsNone(ret_val) - - self.assertEqual(transaction._write_pbs, []) - self.assertIsNone(transaction._id) - - def test__rollback(self): - from google.protobuf import empty_pb2 - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - - # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - firestore_api.rollback.return_value = empty_pb2.Empty() - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Actually make a transaction and roll it back. - transaction = self._make_one(client) - txn_id = b"to-be-r\x00lled" - transaction._id = txn_id - ret_val = transaction._rollback() - self.assertIsNone(ret_val) - self.assertIsNone(transaction._id) - - # Verify the called mock. - firestore_api.rollback.assert_called_once_with( - request={"database": client._database_string, "transaction": txn_id}, - metadata=client._rpc_metadata, - ) - - def test__rollback_not_allowed(self): - from google.cloud.firestore_v1beta1.transaction import _CANT_ROLLBACK - - client = _make_client() - transaction = self._make_one(client) - self.assertIsNone(transaction._id) - - with self.assertRaises(ValueError) as exc_info: - transaction._rollback() - - self.assertEqual(exc_info.exception.args, (_CANT_ROLLBACK,)) - - def test__rollback_failure(self): - from google.api_core import exceptions - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - - # Create a minimal fake GAPIC with a dummy failure. 
- firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - exc = exceptions.InternalServerError("Fire during rollback.") - firestore_api.rollback.side_effect = exc - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Actually make a transaction and roll it back. - transaction = self._make_one(client) - txn_id = b"roll-bad-server" - transaction._id = txn_id - - with self.assertRaises(exceptions.InternalServerError) as exc_info: - transaction._rollback() - - self.assertIs(exc_info.exception, exc) - self.assertIsNone(transaction._id) - self.assertEqual(transaction._write_pbs, []) - - # Verify the called mock. - firestore_api.rollback.assert_called_once_with( - request={"database": client._database_string, "transaction": txn_id}, - metadata=client._rpc_metadata, - ) - - def test__commit(self): - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - from google.cloud.firestore_v1beta1.types import firestore - from google.cloud.firestore_v1beta1.types import write - - # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - commit_response = firestore.CommitResponse(write_results=[write.WriteResult()]) - firestore_api.commit.return_value = commit_response - - # Attach the fake GAPIC to a real client. - client = _make_client("phone-joe") - client._firestore_api_internal = firestore_api - - # Actually make a transaction with some mutations and call _commit(). - transaction = self._make_one(client) - txn_id = b"under-over-thru-woods" - transaction._id = txn_id - document = client.document("zap", "galaxy", "ship", "space") - transaction.set(document, {"apple": 4.5}) - write_pbs = transaction._write_pbs[::] - - write_results = transaction._commit() - self.assertEqual(write_results, list(commit_response.write_results)) - # Make sure transaction has no more "changes". - self.assertIsNone(transaction._id) - self.assertEqual(transaction._write_pbs, []) - - # Verify the mocks. - firestore_api.commit.assert_called_once_with( - # 0:call(request={'database': 'projects/phone-joe/databases/(default)/documents', 'writes': [update { - request={ - "database": client._database_string, - "writes": write_pbs, - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - - def test__commit_not_allowed(self): - from google.cloud.firestore_v1beta1.transaction import _CANT_COMMIT - - transaction = self._make_one(mock.sentinel.client) - self.assertIsNone(transaction._id) - with self.assertRaises(ValueError) as exc_info: - transaction._commit() - - self.assertEqual(exc_info.exception.args, (_CANT_COMMIT,)) - - def test__commit_failure(self): - from google.api_core import exceptions - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - - # Create a minimal fake GAPIC with a dummy failure. - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - exc = exceptions.InternalServerError("Fire during commit.") - firestore_api.commit.side_effect = exc - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - # Actually make a transaction with some mutations and call _commit(). 
- transaction = self._make_one(client) - txn_id = b"beep-fail-commit" - transaction._id = txn_id - transaction.create(client.document("up", "down"), {"water": 1.0}) - transaction.delete(client.document("up", "left")) - write_pbs = transaction._write_pbs[::] - - with self.assertRaises(exceptions.InternalServerError) as exc_info: - transaction._commit() - - self.assertIs(exc_info.exception, exc) - self.assertEqual(transaction._id, txn_id) - self.assertEqual(transaction._write_pbs, write_pbs) - - # Verify the called mock. - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": write_pbs, - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - - -class Test_Transactional(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.transaction import _Transactional - - return _Transactional - - def _make_one(self, *args, **kwargs): - klass = self._get_target_class() - return klass(*args, **kwargs) - - def test_constructor(self): - wrapped = self._make_one(mock.sentinel.callable_) - self.assertIs(wrapped.to_wrap, mock.sentinel.callable_) - self.assertIsNone(wrapped.current_id) - self.assertIsNone(wrapped.retry_id) - - def test__reset(self): - wrapped = self._make_one(mock.sentinel.callable_) - wrapped.current_id = b"not-none" - wrapped.retry_id = b"also-not" - - ret_val = wrapped._reset() - self.assertIsNone(ret_val) - - self.assertIsNone(wrapped.current_id) - self.assertIsNone(wrapped.retry_id) - - def test__pre_commit_success(self): - to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) - wrapped = self._make_one(to_wrap) - - txn_id = b"totes-began" - transaction = _make_transaction(txn_id) - result = wrapped._pre_commit(transaction, "pos", key="word") - self.assertIs(result, mock.sentinel.result) - - self.assertEqual(transaction._id, txn_id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - to_wrap.assert_called_once_with(transaction, "pos", key="word") - firestore_api = transaction._client._firestore_api - firestore_api.begin_transaction.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "options": None, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.rollback.assert_not_called() - firestore_api.commit.assert_not_called() - - def test__pre_commit_retry_id_already_set_success(self): - from google.cloud.firestore_v1beta1.types import common - - to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) - wrapped = self._make_one(to_wrap) - txn_id1 = b"already-set" - wrapped.retry_id = txn_id1 - - txn_id2 = b"ok-here-too" - transaction = _make_transaction(txn_id2) - result = wrapped._pre_commit(transaction) - self.assertIs(result, mock.sentinel.result) - - self.assertEqual(transaction._id, txn_id2) - self.assertEqual(wrapped.current_id, txn_id2) - self.assertEqual(wrapped.retry_id, txn_id1) - - # Verify mocks. 
- to_wrap.assert_called_once_with(transaction) - firestore_api = transaction._client._firestore_api - options_ = common.TransactionOptions( - read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id1) - ) - firestore_api.begin_transaction.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "options": options_, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.rollback.assert_not_called() - firestore_api.commit.assert_not_called() - - def test__pre_commit_failure(self): - exc = RuntimeError("Nope not today.") - to_wrap = mock.Mock(side_effect=exc, spec=[]) - wrapped = self._make_one(to_wrap) - - txn_id = b"gotta-fail" - transaction = _make_transaction(txn_id) - with self.assertRaises(RuntimeError) as exc_info: - wrapped._pre_commit(transaction, 10, 20) - self.assertIs(exc_info.exception, exc) - - self.assertIsNone(transaction._id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - to_wrap.assert_called_once_with(transaction, 10, 20) - firestore_api = transaction._client._firestore_api - firestore_api.begin_transaction.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "options": None, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.rollback.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.commit.assert_not_called() - - def test__pre_commit_failure_with_rollback_failure(self): - from google.api_core import exceptions - - exc1 = ValueError("I will not be only failure.") - to_wrap = mock.Mock(side_effect=exc1, spec=[]) - wrapped = self._make_one(to_wrap) - - txn_id = b"both-will-fail" - transaction = _make_transaction(txn_id) - # Actually force the ``rollback`` to fail as well. - exc2 = exceptions.InternalServerError("Rollback blues.") - firestore_api = transaction._client._firestore_api - firestore_api.rollback.side_effect = exc2 - - # Try to ``_pre_commit`` - with self.assertRaises(exceptions.InternalServerError) as exc_info: - wrapped._pre_commit(transaction, a="b", c="zebra") - self.assertIs(exc_info.exception, exc2) - - self.assertIsNone(transaction._id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - to_wrap.assert_called_once_with(transaction, a="b", c="zebra") - firestore_api.begin_transaction.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "options": None, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.rollback.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.commit.assert_not_called() - - def test__maybe_commit_success(self): - wrapped = self._make_one(mock.sentinel.callable_) - - txn_id = b"nyet" - transaction = _make_transaction(txn_id) - transaction._id = txn_id # We won't call ``begin()``. - succeeded = wrapped._maybe_commit(transaction) - self.assertTrue(succeeded) - - # On success, _id is reset. - self.assertIsNone(transaction._id) - - # Verify mocks. 
- firestore_api = transaction._client._firestore_api - firestore_api.begin_transaction.assert_not_called() - firestore_api.rollback.assert_not_called() - firestore_api.commit.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "writes": [], - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - - def test__maybe_commit_failure_read_only(self): - from google.api_core import exceptions - - wrapped = self._make_one(mock.sentinel.callable_) - - txn_id = b"failed" - transaction = _make_transaction(txn_id, read_only=True) - transaction._id = txn_id # We won't call ``begin()``. - wrapped.current_id = txn_id # We won't call ``_pre_commit()``. - wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. - - # Actually force the ``commit`` to fail (use ABORTED, but cannot - # retry since read-only). - exc = exceptions.Aborted("Read-only did a bad.") - firestore_api = transaction._client._firestore_api - firestore_api.commit.side_effect = exc - - with self.assertRaises(exceptions.Aborted) as exc_info: - wrapped._maybe_commit(transaction) - self.assertIs(exc_info.exception, exc) - - self.assertEqual(transaction._id, txn_id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - firestore_api.begin_transaction.assert_not_called() - firestore_api.rollback.assert_not_called() - firestore_api.commit.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "writes": [], - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - - def test__maybe_commit_failure_can_retry(self): - from google.api_core import exceptions - - wrapped = self._make_one(mock.sentinel.callable_) - - txn_id = b"failed-but-retry" - transaction = _make_transaction(txn_id) - transaction._id = txn_id # We won't call ``begin()``. - wrapped.current_id = txn_id # We won't call ``_pre_commit()``. - wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. - - # Actually force the ``commit`` to fail. - exc = exceptions.Aborted("Read-write did a bad.") - firestore_api = transaction._client._firestore_api - firestore_api.commit.side_effect = exc - - succeeded = wrapped._maybe_commit(transaction) - self.assertFalse(succeeded) - - self.assertEqual(transaction._id, txn_id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - firestore_api.begin_transaction.assert_not_called() - firestore_api.rollback.assert_not_called() - firestore_api.commit.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "writes": [], - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - - def test__maybe_commit_failure_cannot_retry(self): - from google.api_core import exceptions - - wrapped = self._make_one(mock.sentinel.callable_) - - txn_id = b"failed-but-not-retryable" - transaction = _make_transaction(txn_id) - transaction._id = txn_id # We won't call ``begin()``. - wrapped.current_id = txn_id # We won't call ``_pre_commit()``. - wrapped.retry_id = txn_id # We won't call ``_pre_commit()``. - - # Actually force the ``commit`` to fail. 
- exc = exceptions.InternalServerError("Real bad thing") - firestore_api = transaction._client._firestore_api - firestore_api.commit.side_effect = exc - - with self.assertRaises(exceptions.InternalServerError) as exc_info: - wrapped._maybe_commit(transaction) - self.assertIs(exc_info.exception, exc) - - self.assertEqual(transaction._id, txn_id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - firestore_api.begin_transaction.assert_not_called() - firestore_api.rollback.assert_not_called() - firestore_api.commit.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "writes": [], - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - - def test___call__success_first_attempt(self): - to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) - wrapped = self._make_one(to_wrap) - - txn_id = b"whole-enchilada" - transaction = _make_transaction(txn_id) - result = wrapped(transaction, "a", b="c") - self.assertIs(result, mock.sentinel.result) - - self.assertIsNone(transaction._id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - to_wrap.assert_called_once_with(transaction, "a", b="c") - firestore_api = transaction._client._firestore_api - firestore_api.begin_transaction.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "options": None, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.rollback.assert_not_called() - firestore_api.commit.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "writes": [], - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - - def test___call__success_second_attempt(self): - from google.api_core import exceptions - from google.cloud.firestore_v1beta1.types import common - from google.cloud.firestore_v1beta1.types import firestore - from google.cloud.firestore_v1beta1.types import write - - to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) - wrapped = self._make_one(to_wrap) - - txn_id = b"whole-enchilada" - transaction = _make_transaction(txn_id) - - # Actually force the ``commit`` to fail on first / succeed on second. - exc = exceptions.Aborted("Contention junction.") - firestore_api = transaction._client._firestore_api - firestore_api.commit.side_effect = [ - exc, - firestore.CommitResponse(write_results=[write.WriteResult()]), - ] - - # Call the __call__-able ``wrapped``. - result = wrapped(transaction, "a", b="c") - self.assertIs(result, mock.sentinel.result) - - self.assertIsNone(transaction._id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. 
- wrapped_call = mock.call(transaction, "a", b="c") - self.assertEqual(to_wrap.mock_calls, [wrapped_call, wrapped_call]) - firestore_api = transaction._client._firestore_api - db_str = transaction._client._database_string - options_ = common.TransactionOptions( - read_write=common.TransactionOptions.ReadWrite(retry_transaction=txn_id) - ) - self.assertEqual( - firestore_api.begin_transaction.mock_calls, - [ - mock.call( - request={"database": db_str, "options": None}, - metadata=transaction._client._rpc_metadata, - ), - mock.call( - request={"database": db_str, "options": options_}, - metadata=transaction._client._rpc_metadata, - ), - ], - ) - firestore_api.rollback.assert_not_called() - commit_call = mock.call( - request={"database": db_str, "writes": [], "transaction": txn_id}, - metadata=transaction._client._rpc_metadata, - ) - self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call]) - - def test___call__failure(self): - from google.api_core import exceptions - from google.cloud.firestore_v1beta1.transaction import _EXCEED_ATTEMPTS_TEMPLATE - - to_wrap = mock.Mock(return_value=mock.sentinel.result, spec=[]) - wrapped = self._make_one(to_wrap) - - txn_id = b"only-one-shot" - transaction = _make_transaction(txn_id, max_attempts=1) - - # Actually force the ``commit`` to fail. - exc = exceptions.Aborted("Contention just once.") - firestore_api = transaction._client._firestore_api - firestore_api.commit.side_effect = exc - - # Call the __call__-able ``wrapped``. - with self.assertRaises(ValueError) as exc_info: - wrapped(transaction, "here", there=1.5) - - err_msg = _EXCEED_ATTEMPTS_TEMPLATE.format(transaction._max_attempts) - self.assertEqual(exc_info.exception.args, (err_msg,)) - - self.assertIsNone(transaction._id) - self.assertEqual(wrapped.current_id, txn_id) - self.assertEqual(wrapped.retry_id, txn_id) - - # Verify mocks. - to_wrap.assert_called_once_with(transaction, "here", there=1.5) - firestore_api.begin_transaction.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "options": None, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.rollback.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - firestore_api.commit.assert_called_once_with( - request={ - "database": transaction._client._database_string, - "writes": [], - "transaction": txn_id, - }, - metadata=transaction._client._rpc_metadata, - ) - - -class Test_transactional(unittest.TestCase): - @staticmethod - def _call_fut(to_wrap): - from google.cloud.firestore_v1beta1.transaction import transactional - - return transactional(to_wrap) - - def test_it(self): - from google.cloud.firestore_v1beta1.transaction import _Transactional - - wrapped = self._call_fut(mock.sentinel.callable_) - self.assertIsInstance(wrapped, _Transactional) - self.assertIs(wrapped.to_wrap, mock.sentinel.callable_) - - -class Test__commit_with_retry(unittest.TestCase): - @staticmethod - def _call_fut(client, write_pbs, transaction_id): - from google.cloud.firestore_v1beta1.transaction import _commit_with_retry - - return _commit_with_retry(client, write_pbs, transaction_id) - - @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep") - def test_success_first_attempt(self, _sleep): - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - - # Create a minimal fake GAPIC with a dummy result. 
- firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - - # Attach the fake GAPIC to a real client. - client = _make_client("summer") - client._firestore_api_internal = firestore_api - - # Call function and check result. - txn_id = b"cheeeeeez" - commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id) - self.assertIs(commit_response, firestore_api.commit.return_value) - - # Verify mocks used. - _sleep.assert_not_called() - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": mock.sentinel.write_pbs, - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - - @mock.patch( - "google.cloud.firestore_v1beta1.transaction._sleep", side_effect=[2.0, 4.0] - ) - def test_success_third_attempt(self, _sleep): - from google.api_core import exceptions - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - - # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - # Make sure the first two requests fail and the third succeeds. - firestore_api.commit.side_effect = [ - exceptions.ServiceUnavailable("Server sleepy."), - exceptions.ServiceUnavailable("Server groggy."), - mock.sentinel.commit_response, - ] - - # Attach the fake GAPIC to a real client. - client = _make_client("outside") - client._firestore_api_internal = firestore_api - - # Call function and check result. - txn_id = b"the-world\x00" - commit_response = self._call_fut(client, mock.sentinel.write_pbs, txn_id) - self.assertIs(commit_response, mock.sentinel.commit_response) - - # Verify mocks used. - self.assertEqual(_sleep.call_count, 2) - _sleep.assert_any_call(1.0) - _sleep.assert_any_call(2.0) - # commit() called same way 3 times. - commit_call = mock.call( - request={ - "database": client._database_string, - "writes": mock.sentinel.write_pbs, - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - self.assertEqual( - firestore_api.commit.mock_calls, [commit_call, commit_call, commit_call] - ) - - @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep") - def test_failure_first_attempt(self, _sleep): - from google.api_core import exceptions - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - - # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - # Make sure the first request fails with an un-retryable error. - exc = exceptions.ResourceExhausted("We ran out of fries.") - firestore_api.commit.side_effect = exc - - # Attach the fake GAPIC to a real client. - client = _make_client("peanut-butter") - client._firestore_api_internal = firestore_api - - # Call function and check result. - txn_id = b"\x08\x06\x07\x05\x03\x00\x09-jenny" - with self.assertRaises(exceptions.ResourceExhausted) as exc_info: - self._call_fut(client, mock.sentinel.write_pbs, txn_id) - - self.assertIs(exc_info.exception, exc) - - # Verify mocks used. 
- _sleep.assert_not_called() - firestore_api.commit.assert_called_once_with( - request={ - "database": client._database_string, - "writes": mock.sentinel.write_pbs, - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - - @mock.patch("google.cloud.firestore_v1beta1.transaction._sleep", return_value=2.0) - def test_failure_second_attempt(self, _sleep): - from google.api_core import exceptions - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - - # Create a minimal fake GAPIC with a dummy result. - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - # Make sure the first request fails retry-able and second - # fails non-retryable. - exc1 = exceptions.ServiceUnavailable("Come back next time.") - exc2 = exceptions.InternalServerError("Server on fritz.") - firestore_api.commit.side_effect = [exc1, exc2] - - # Attach the fake GAPIC to a real client. - client = _make_client("peanut-butter") - client._firestore_api_internal = firestore_api - - # Call function and check result. - txn_id = b"the-journey-when-and-where-well-go" - with self.assertRaises(exceptions.InternalServerError) as exc_info: - self._call_fut(client, mock.sentinel.write_pbs, txn_id) - - self.assertIs(exc_info.exception, exc2) - - # Verify mocks used. - _sleep.assert_called_once_with(1.0) - # commit() called same way 2 times. - commit_call = mock.call( - request={ - "database": client._database_string, - "writes": mock.sentinel.write_pbs, - "transaction": txn_id, - }, - metadata=client._rpc_metadata, - ) - self.assertEqual(firestore_api.commit.mock_calls, [commit_call, commit_call]) - - -class Test__sleep(unittest.TestCase): - @staticmethod - def _call_fut(current_sleep, **kwargs): - from google.cloud.firestore_v1beta1.transaction import _sleep - - return _sleep(current_sleep, **kwargs) - - @mock.patch("random.uniform", return_value=5.5) - @mock.patch("time.sleep", return_value=None) - def test_defaults(self, sleep, uniform): - curr_sleep = 10.0 - self.assertLessEqual(uniform.return_value, curr_sleep) - - new_sleep = self._call_fut(curr_sleep) - self.assertEqual(new_sleep, 2.0 * curr_sleep) - - uniform.assert_called_once_with(0.0, curr_sleep) - sleep.assert_called_once_with(uniform.return_value) - - @mock.patch("random.uniform", return_value=10.5) - @mock.patch("time.sleep", return_value=None) - def test_explicit(self, sleep, uniform): - curr_sleep = 12.25 - self.assertLessEqual(uniform.return_value, curr_sleep) - - multiplier = 1.5 - new_sleep = self._call_fut(curr_sleep, max_sleep=100.0, multiplier=multiplier) - self.assertEqual(new_sleep, multiplier * curr_sleep) - - uniform.assert_called_once_with(0.0, curr_sleep) - sleep.assert_called_once_with(uniform.return_value) - - @mock.patch("random.uniform", return_value=6.75) - @mock.patch("time.sleep", return_value=None) - def test_exceeds_max(self, sleep, uniform): - curr_sleep = 20.0 - self.assertLessEqual(uniform.return_value, curr_sleep) - - max_sleep = 38.5 - new_sleep = self._call_fut(curr_sleep, max_sleep=max_sleep, multiplier=2.0) - self.assertEqual(new_sleep, max_sleep) - - uniform.assert_called_once_with(0.0, curr_sleep) - sleep.assert_called_once_with(uniform.return_value) - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -def _make_client(project="feral-tom-cat"): - from google.cloud.firestore_v1beta1.client import Client - - credentials = _make_credentials() - - with 
pytest.deprecated_call(): - return Client(project=project, credentials=credentials) - - -def _make_transaction(txn_id, **txn_kwargs): - from google.protobuf import empty_pb2 - from google.cloud.firestore_v1beta1.services.firestore import ( - client as firestore_client, - ) - from google.cloud.firestore_v1beta1.types import firestore - from google.cloud.firestore_v1beta1.types import write - from google.cloud.firestore_v1beta1.transaction import Transaction - - # Create a fake GAPIC ... - firestore_api = mock.create_autospec( - firestore_client.FirestoreClient, instance=True - ) - # ... with a dummy ``BeginTransactionResponse`` result ... - begin_response = firestore.BeginTransactionResponse(transaction=txn_id) - firestore_api.begin_transaction.return_value = begin_response - # ... and a dummy ``Rollback`` result ... - firestore_api.rollback.return_value = empty_pb2.Empty() - # ... and a dummy ``Commit`` result. - commit_response = firestore.CommitResponse(write_results=[write.WriteResult()]) - firestore_api.commit.return_value = commit_response - - # Attach the fake GAPIC to a real client. - client = _make_client() - client._firestore_api_internal = firestore_api - - return Transaction(client, **txn_kwargs) diff --git a/tests/unit/v1beta1/test_transforms.py b/tests/unit/v1beta1/test_transforms.py deleted file mode 100644 index 0f549ae07..000000000 --- a/tests/unit/v1beta1/test_transforms.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2017 Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_ValueList(unittest.TestCase): - @staticmethod - def _get_target_class(): - from google.cloud.firestore_v1beta1.transforms import _ValueList - - return _ValueList - - def _make_one(self, values): - return self._get_target_class()(values) - - def test_ctor_w_non_list_non_tuple(self): - invalid_values = (None, u"phred", b"DEADBEEF", 123, {}, object()) - for invalid_value in invalid_values: - with self.assertRaises(ValueError): - self._make_one(invalid_value) - - def test_ctor_w_empty(self): - with self.assertRaises(ValueError): - self._make_one([]) - - def test_ctor_w_non_empty_list(self): - values = ["phred", "bharney"] - inst = self._make_one(values) - self.assertEqual(inst.values, values) - - def test_ctor_w_non_empty_tuple(self): - values = ("phred", "bharney") - inst = self._make_one(values) - self.assertEqual(inst.values, list(values)) - - def test___eq___other_type(self): - values = ("phred", "bharney") - inst = self._make_one(values) - other = object() - self.assertFalse(inst == other) - - def test___eq___different_values(self): - values = ("phred", "bharney") - other_values = ("wylma", "bhetty") - inst = self._make_one(values) - other = self._make_one(other_values) - self.assertFalse(inst == other) - - def test___eq___same_values(self): - values = ("phred", "bharney") - inst = self._make_one(values) - other = self._make_one(values) - self.assertTrue(inst == other) diff --git a/tests/unit/v1beta1/test_watch.py b/tests/unit/v1beta1/test_watch.py deleted file mode 100644 index 87235b28e..000000000 --- a/tests/unit/v1beta1/test_watch.py +++ /dev/null @@ -1,849 +0,0 @@ -import datetime -import unittest -import mock -from google.cloud.firestore_v1beta1.types import firestore - - -class TestWatchDocTree(unittest.TestCase): - def _makeOne(self): - from google.cloud.firestore_v1beta1.watch import WatchDocTree - - return WatchDocTree() - - def test_insert_and_keys(self): - inst = self._makeOne() - inst = inst.insert("b", 1) - inst = inst.insert("a", 2) - self.assertEqual(sorted(inst.keys()), ["a", "b"]) - - def test_remove_and_keys(self): - inst = self._makeOne() - inst = inst.insert("b", 1) - inst = inst.insert("a", 2) - inst = inst.remove("a") - self.assertEqual(sorted(inst.keys()), ["b"]) - - def test_insert_and_find(self): - inst = self._makeOne() - inst = inst.insert("b", 1) - inst = inst.insert("a", 2) - val = inst.find("a") - self.assertEqual(val.value, 2) - - def test___len__(self): - inst = self._makeOne() - inst = inst.insert("b", 1) - inst = inst.insert("a", 2) - self.assertEqual(len(inst), 2) - - def test___iter__(self): - inst = self._makeOne() - inst = inst.insert("b", 1) - inst = inst.insert("a", 2) - self.assertEqual(sorted(list(inst)), ["a", "b"]) - - def test___contains__(self): - inst = self._makeOne() - inst = inst.insert("b", 1) - self.assertTrue("b" in inst) - self.assertFalse("a" in inst) - - -class TestDocumentChange(unittest.TestCase): - def _makeOne(self, type, document, old_index, new_index): - from google.cloud.firestore_v1beta1.watch import DocumentChange - - return DocumentChange(type, document, old_index, new_index) - - def test_ctor(self): - inst = self._makeOne("type", "document", "old_index", "new_index") - self.assertEqual(inst.type, "type") - self.assertEqual(inst.document, "document") - self.assertEqual(inst.old_index, "old_index") - self.assertEqual(inst.new_index, "new_index") - - -class TestWatchResult(unittest.TestCase): - def _makeOne(self, snapshot, name, change_type): - from google.cloud.firestore_v1beta1.watch 
import WatchResult - - return WatchResult(snapshot, name, change_type) - - def test_ctor(self): - inst = self._makeOne("snapshot", "name", "change_type") - self.assertEqual(inst.snapshot, "snapshot") - self.assertEqual(inst.name, "name") - self.assertEqual(inst.change_type, "change_type") - - -class Test_maybe_wrap_exception(unittest.TestCase): - def _callFUT(self, exc): - from google.cloud.firestore_v1beta1.watch import _maybe_wrap_exception - - return _maybe_wrap_exception(exc) - - def test_is_grpc_error(self): - import grpc - from google.api_core.exceptions import GoogleAPICallError - - exc = grpc.RpcError() - result = self._callFUT(exc) - self.assertEqual(result.__class__, GoogleAPICallError) - - def test_is_not_grpc_error(self): - exc = ValueError() - result = self._callFUT(exc) - self.assertEqual(result.__class__, ValueError) - - -class Test_document_watch_comparator(unittest.TestCase): - def _callFUT(self, doc1, doc2): - from google.cloud.firestore_v1beta1.watch import document_watch_comparator - - return document_watch_comparator(doc1, doc2) - - def test_same_doc(self): - result = self._callFUT(1, 1) - self.assertEqual(result, 0) - - def test_diff_doc(self): - self.assertRaises(AssertionError, self._callFUT, 1, 2) - - -class TestWatch(unittest.TestCase): - def _makeOne( - self, - document_reference=None, - firestore=None, - target=None, - comparator=None, - snapshot_callback=None, - snapshot_class=None, - reference_class=None, - ): # pragma: NO COVER - from google.cloud.firestore_v1beta1.watch import Watch - - if document_reference is None: - document_reference = DummyDocumentReference() - if firestore is None: - firestore = DummyFirestore() - if target is None: - WATCH_TARGET_ID = 0x5079 # "Py" - target = {"documents": {"documents": ["/"]}, "target_id": WATCH_TARGET_ID} - if comparator is None: - comparator = self._document_watch_comparator - if snapshot_callback is None: - snapshot_callback = self._snapshot_callback - if snapshot_class is None: - snapshot_class = DummyDocumentSnapshot - if reference_class is None: - reference_class = DummyDocumentReference - inst = Watch( - document_reference, - firestore, - target, - comparator, - snapshot_callback, - snapshot_class, - reference_class, - BackgroundConsumer=DummyBackgroundConsumer, - ResumableBidiRpc=DummyRpc, - ) - return inst - - def setUp(self): - self.snapshotted = None - - def _document_watch_comparator(self, doc1, doc2): # pragma: NO COVER - return 0 - - def _snapshot_callback(self, docs, changes, read_time): - self.snapshotted = (docs, changes, read_time) - - def test_ctor(self): - inst = self._makeOne() - self.assertTrue(inst._consumer.started) - self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) - - def test__on_rpc_done(self): - inst = self._makeOne() - threading = DummyThreading() - with mock.patch("google.cloud.firestore_v1beta1.watch.threading", threading): - inst._on_rpc_done(True) - from google.cloud.firestore_v1beta1.watch import _RPC_ERROR_THREAD_NAME - - self.assertTrue(threading.threads[_RPC_ERROR_THREAD_NAME].started) - - def test_close(self): - inst = self._makeOne() - inst.close() - self.assertEqual(inst._consumer, None) - self.assertEqual(inst._rpc, None) - self.assertTrue(inst._closed) - - def test_close_already_closed(self): - inst = self._makeOne() - inst._closed = True - old_consumer = inst._consumer - inst.close() - self.assertEqual(inst._consumer, old_consumer) - - def test_close_inactive(self): - inst = self._makeOne() - old_consumer = inst._consumer - old_consumer.is_active = False - 
inst.close() - self.assertEqual(old_consumer.stopped, False) - - def test_unsubscribe(self): - inst = self._makeOne() - inst.unsubscribe() - self.assertTrue(inst._rpc is None) - - def test_for_document(self): - from google.cloud.firestore_v1beta1.watch import Watch - - docref = DummyDocumentReference() - snapshot_callback = self._snapshot_callback - snapshot_class_instance = DummyDocumentSnapshot - document_reference_class_instance = DummyDocumentReference - modulename = "google.cloud.firestore_v1beta1.watch" - with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc): - with mock.patch( - "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer - ): - inst = Watch.for_document( - docref, - snapshot_callback, - snapshot_class_instance, - document_reference_class_instance, - ) - self.assertTrue(inst._consumer.started) - self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) - - def test_for_query(self): - from google.cloud.firestore_v1beta1.watch import Watch - - snapshot_callback = self._snapshot_callback - snapshot_class_instance = DummyDocumentSnapshot - document_reference_class_instance = DummyDocumentReference - modulename = "google.cloud.firestore_v1beta1.watch" - pb2 = DummyPb2() - with mock.patch("%s.firestore" % modulename, pb2): - with mock.patch("%s.Watch.ResumableBidiRpc" % modulename, DummyRpc): - with mock.patch( - "%s.Watch.BackgroundConsumer" % modulename, DummyBackgroundConsumer - ): - query = DummyQuery() - inst = Watch.for_query( - query, - snapshot_callback, - snapshot_class_instance, - document_reference_class_instance, - ) - self.assertTrue(inst._consumer.started) - self.assertTrue(inst._rpc.callbacks, [inst._on_rpc_done]) - self.assertEqual(inst._targets["query"]._pb, "dummy query target") - - def test_on_snapshot_target_no_change_no_target_ids_not_current(self): - inst = self._makeOne() - proto = DummyProto() - inst.on_snapshot(proto) # nothing to assert, no mutations, no rtnval - - def test_on_snapshot_target_no_change_no_target_ids_current(self): - inst = self._makeOne() - proto = DummyProto() - proto.target_change.read_time = 1 - inst.current = True - - def push(read_time, next_resume_token): - inst._read_time = read_time - inst._next_resume_token = next_resume_token - - inst.push = push - inst.on_snapshot(proto) - self.assertEqual(inst._read_time, 1) - self.assertEqual(inst._next_resume_token, None) - - def test_on_snapshot_target_add(self): - inst = self._makeOne() - proto = DummyProto() - proto.target_change.target_change_type = ( - firestore.TargetChange.TargetChangeType.ADD - ) - proto.target_change.target_ids = [1] # not "Py" - with self.assertRaises(Exception) as exc: - inst.on_snapshot(proto) - self.assertEqual(str(exc.exception), "Unexpected target ID 1 sent by server") - - def test_on_snapshot_target_remove(self): - inst = self._makeOne() - proto = DummyProto() - target_change = proto.target_change - target_change.target_change_type = ( - firestore.TargetChange.TargetChangeType.REMOVE - ) - with self.assertRaises(Exception) as exc: - inst.on_snapshot(proto) - self.assertEqual(str(exc.exception), "Error 1: hi") - - def test_on_snapshot_target_remove_nocause(self): - inst = self._makeOne() - proto = DummyProto() - target_change = proto.target_change - target_change.cause = None - target_change.target_change_type = ( - firestore.TargetChange.TargetChangeType.REMOVE - ) - with self.assertRaises(Exception) as exc: - inst.on_snapshot(proto) - self.assertEqual(str(exc.exception), "Error 13: internal error") - - def 
test_on_snapshot_target_reset(self): - inst = self._makeOne() - - def reset(): - inst._docs_reset = True - - inst._reset_docs = reset - proto = DummyProto() - target_change = proto.target_change - target_change.target_change_type = firestore.TargetChange.TargetChangeType.RESET - inst.on_snapshot(proto) - self.assertTrue(inst._docs_reset) - - def test_on_snapshot_target_current(self): - inst = self._makeOne() - inst.current = False - proto = DummyProto() - target_change = proto.target_change - target_change.target_change_type = ( - firestore.TargetChange.TargetChangeType.CURRENT - ) - inst.on_snapshot(proto) - self.assertTrue(inst.current) - - def test_on_snapshot_target_unknown(self): - inst = self._makeOne() - proto = DummyProto() - proto.target_change.target_change_type = "unknown" - with self.assertRaises(Exception) as exc: - inst.on_snapshot(proto) - self.assertTrue(inst._consumer is None) - self.assertTrue(inst._rpc is None) - self.assertEqual(str(exc.exception), "Unknown target change type: unknown ") - - def test_on_snapshot_document_change_removed(self): - from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID, ChangeType - - inst = self._makeOne() - proto = DummyProto() - proto.target_change = "" - proto.document_change.removed_target_ids = [WATCH_TARGET_ID] - - class DummyDocument: - name = "fred" - - proto.document_change.document = DummyDocument() - inst.on_snapshot(proto) - self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED) - - def test_on_snapshot_document_change_changed(self): - from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID - - inst = self._makeOne() - - proto = DummyProto() - proto.target_change = "" - proto.document_change.target_ids = [WATCH_TARGET_ID] - - class DummyDocument: - name = "fred" - fields = {} - create_time = None - update_time = None - - proto.document_change.document = DummyDocument() - inst.on_snapshot(proto) - self.assertEqual(inst.change_map["fred"].data, {}) - - def test_on_snapshot_document_change_changed_docname_db_prefix(self): - # TODO: Verify the current behavior. The change map currently contains - # the db-prefixed document name and not the bare document name. 
- from google.cloud.firestore_v1beta1.watch import WATCH_TARGET_ID - - inst = self._makeOne() - - proto = DummyProto() - proto.target_change = "" - proto.document_change.target_ids = [WATCH_TARGET_ID] - - class DummyDocument: - name = "abc://foo/documents/fred" - fields = {} - create_time = None - update_time = None - - proto.document_change.document = DummyDocument() - inst._firestore._database_string = "abc://foo" - inst.on_snapshot(proto) - self.assertEqual(inst.change_map["abc://foo/documents/fred"].data, {}) - - def test_on_snapshot_document_change_neither_changed_nor_removed(self): - inst = self._makeOne() - proto = DummyProto() - proto.target_change = "" - proto.document_change.target_ids = [] - - inst.on_snapshot(proto) - self.assertTrue(not inst.change_map) - - def test_on_snapshot_document_removed(self): - from google.cloud.firestore_v1beta1.watch import ChangeType - - inst = self._makeOne() - proto = DummyProto() - proto.target_change = "" - proto.document_change = "" - - class DummyRemove(object): - document = "fred" - - remove = DummyRemove() - proto.document_remove = remove - proto.document_delete = "" - inst.on_snapshot(proto) - self.assertTrue(inst.change_map["fred"] is ChangeType.REMOVED) - - def test_on_snapshot_filter_update(self): - inst = self._makeOne() - proto = DummyProto() - proto.target_change = "" - proto.document_change = "" - proto.document_remove = "" - proto.document_delete = "" - - class DummyFilter(object): - count = 999 - - proto.filter = DummyFilter() - - def reset(): - inst._docs_reset = True - - inst._reset_docs = reset - inst.on_snapshot(proto) - self.assertTrue(inst._docs_reset) - - def test_on_snapshot_filter_update_no_size_change(self): - inst = self._makeOne() - proto = DummyProto() - proto.target_change = "" - proto.document_change = "" - proto.document_remove = "" - proto.document_delete = "" - - class DummyFilter(object): - count = 0 - - proto.filter = DummyFilter() - inst._docs_reset = False - - inst.on_snapshot(proto) - self.assertFalse(inst._docs_reset) - - def test_on_snapshot_unknown_listen_type(self): - inst = self._makeOne() - proto = DummyProto() - proto.target_change = "" - proto.document_change = "" - proto.document_remove = "" - proto.document_delete = "" - proto.filter = "" - with self.assertRaises(Exception) as exc: - inst.on_snapshot(proto) - self.assertTrue( - str(exc.exception).startswith("Unknown listen response type"), - str(exc.exception), - ) - - def test_push_callback_called_no_changes(self): - import pytz - - class DummyReadTime(object): - seconds = 1534858278 - - inst = self._makeOne() - inst.push(DummyReadTime, "token") - self.assertEqual( - self.snapshotted, - ([], [], datetime.datetime.fromtimestamp(DummyReadTime.seconds, pytz.utc)), - ) - self.assertTrue(inst.has_pushed) - self.assertEqual(inst.resume_token, "token") - - def test_push_already_pushed(self): - class DummyReadTime(object): - seconds = 1534858278 - - inst = self._makeOne() - inst.has_pushed = True - inst.push(DummyReadTime, "token") - self.assertEqual(self.snapshotted, None) - self.assertTrue(inst.has_pushed) - self.assertEqual(inst.resume_token, "token") - - def test__current_size_empty(self): - inst = self._makeOne() - result = inst._current_size() - self.assertEqual(result, 0) - - def test__current_size_docmap_has_one(self): - inst = self._makeOne() - inst.doc_map["a"] = 1 - result = inst._current_size() - self.assertEqual(result, 1) - - def test__affects_target_target_id_None(self): - inst = self._makeOne() - self.assertTrue(inst._affects_target(None, 
[])) - - def test__affects_target_current_id_in_target_ids(self): - inst = self._makeOne() - self.assertTrue(inst._affects_target([1], 1)) - - def test__affects_target_current_id_not_in_target_ids(self): - inst = self._makeOne() - self.assertFalse(inst._affects_target([1], 2)) - - def test__extract_changes_doc_removed(self): - from google.cloud.firestore_v1beta1.watch import ChangeType - - inst = self._makeOne() - changes = {"name": ChangeType.REMOVED} - doc_map = {"name": True} - results = inst._extract_changes(doc_map, changes, None) - self.assertEqual(results, (["name"], [], [])) - - def test__extract_changes_doc_removed_docname_not_in_docmap(self): - from google.cloud.firestore_v1beta1.watch import ChangeType - - inst = self._makeOne() - changes = {"name": ChangeType.REMOVED} - doc_map = {} - results = inst._extract_changes(doc_map, changes, None) - self.assertEqual(results, ([], [], [])) - - def test__extract_changes_doc_updated(self): - inst = self._makeOne() - - class Dummy(object): - pass - - doc = Dummy() - snapshot = Dummy() - changes = {"name": snapshot} - doc_map = {"name": doc} - results = inst._extract_changes(doc_map, changes, 1) - self.assertEqual(results, ([], [], [snapshot])) - self.assertEqual(snapshot.read_time, 1) - - def test__extract_changes_doc_updated_read_time_is_None(self): - inst = self._makeOne() - - class Dummy(object): - pass - - doc = Dummy() - snapshot = Dummy() - snapshot.read_time = None - changes = {"name": snapshot} - doc_map = {"name": doc} - results = inst._extract_changes(doc_map, changes, None) - self.assertEqual(results, ([], [], [snapshot])) - self.assertEqual(snapshot.read_time, None) - - def test__extract_changes_doc_added(self): - inst = self._makeOne() - - class Dummy(object): - pass - - snapshot = Dummy() - changes = {"name": snapshot} - doc_map = {} - results = inst._extract_changes(doc_map, changes, 1) - self.assertEqual(results, ([], [snapshot], [])) - self.assertEqual(snapshot.read_time, 1) - - def test__extract_changes_doc_added_read_time_is_None(self): - inst = self._makeOne() - - class Dummy(object): - pass - - snapshot = Dummy() - snapshot.read_time = None - changes = {"name": snapshot} - doc_map = {} - results = inst._extract_changes(doc_map, changes, None) - self.assertEqual(results, ([], [snapshot], [])) - self.assertEqual(snapshot.read_time, None) - - def test__compute_snapshot_doctree_and_docmap_disagree_about_length(self): - inst = self._makeOne() - doc_tree = {} - doc_map = {None: None} - self.assertRaises( - AssertionError, inst._compute_snapshot, doc_tree, doc_map, None, None, None - ) - - def test__compute_snapshot_operation_relative_ordering(self): - from google.cloud.firestore_v1beta1.watch import WatchDocTree - - doc_tree = WatchDocTree() - - class DummyDoc(object): - update_time = mock.sentinel - - deleted_doc = DummyDoc() - added_doc = DummyDoc() - added_doc._document_path = "/added" - updated_doc = DummyDoc() - updated_doc._document_path = "/updated" - doc_tree = doc_tree.insert(deleted_doc, None) - doc_tree = doc_tree.insert(updated_doc, None) - doc_map = {"/deleted": deleted_doc, "/updated": updated_doc} - added_snapshot = DummyDocumentSnapshot(added_doc, None, True, None, None, None) - added_snapshot.reference = added_doc - updated_snapshot = DummyDocumentSnapshot( - updated_doc, None, True, None, None, None - ) - updated_snapshot.reference = updated_doc - delete_changes = ["/deleted"] - add_changes = [added_snapshot] - update_changes = [updated_snapshot] - inst = self._makeOne() - updated_tree, updated_map, 
applied_changes = inst._compute_snapshot( - doc_tree, doc_map, delete_changes, add_changes, update_changes - ) - # TODO: Verify that the assertion here is correct. - self.assertEqual( - updated_map, {"/updated": updated_snapshot, "/added": added_snapshot} - ) - - def test__compute_snapshot_modify_docs_updated_doc_no_timechange(self): - from google.cloud.firestore_v1beta1.watch import WatchDocTree - - doc_tree = WatchDocTree() - - class DummyDoc(object): - pass - - updated_doc_v1 = DummyDoc() - updated_doc_v1.update_time = 1 - updated_doc_v1._document_path = "/updated" - updated_doc_v2 = DummyDoc() - updated_doc_v2.update_time = 1 - updated_doc_v2._document_path = "/updated" - doc_tree = doc_tree.insert("/updated", updated_doc_v1) - doc_map = {"/updated": updated_doc_v1} - updated_snapshot = DummyDocumentSnapshot( - updated_doc_v2, None, True, None, None, 1 - ) - delete_changes = [] - add_changes = [] - update_changes = [updated_snapshot] - inst = self._makeOne() - updated_tree, updated_map, applied_changes = inst._compute_snapshot( - doc_tree, doc_map, delete_changes, add_changes, update_changes - ) - self.assertEqual(updated_map, doc_map) # no change - - def test__reset_docs(self): - from google.cloud.firestore_v1beta1.watch import ChangeType - - inst = self._makeOne() - inst.change_map = {None: None} - from google.cloud.firestore_v1beta1.watch import WatchDocTree - - doc = DummyDocumentReference("doc") - doc_tree = WatchDocTree() - snapshot = DummyDocumentSnapshot(doc, None, True, None, None, None) - snapshot.reference = doc - doc_tree = doc_tree.insert(snapshot, None) - inst.doc_tree = doc_tree - inst._reset_docs() - self.assertEqual(inst.change_map, {"/doc": ChangeType.REMOVED}) - self.assertEqual(inst.resume_token, None) - self.assertFalse(inst.current) - - -class DummyFirestoreStub(object): - def Listen(self): # pragma: NO COVER - pass - - -class DummyFirestoreClient(object): - def __init__(self): - self._transport = mock.Mock(_stubs={"firestore_stub": DummyFirestoreStub()}) - - -class DummyDocumentReference(object): - def __init__(self, *document_path, **kw): - if "client" not in kw: - self._client = DummyFirestore() - else: - self._client = kw["client"] - - self._path = document_path - self._document_path = "/" + "/".join(document_path) - self.__dict__.update(kw) - - -class DummyQuery(object): # pragma: NO COVER - def __init__(self, **kw): - if "client" not in kw: - self._client = DummyFirestore() - else: - self._client = kw["client"] - - if "comparator" not in kw: - # don't really do the comparison, just return 0 (equal) for all - self._comparator = lambda x, y: 1 - else: - self._comparator = kw["comparator"] - - def _to_protobuf(self): - return "" - - -class DummyFirestore(object): - _firestore_api = DummyFirestoreClient() - _database_string = "abc://bar/" - _rpc_metadata = None - - def ListenRequest(self, **kw): # pragma: NO COVER - pass - - def document(self, *document_path): # pragma: NO COVER - if len(document_path) == 1: - path = document_path[0].split("/") - else: - path = document_path - - return DummyDocumentReference(*path, client=self) - - -class DummyDocumentSnapshot(object): - # def __init__(self, **kw): - # self.__dict__.update(kw) - def __init__(self, reference, data, exists, read_time, create_time, update_time): - self.reference = reference - self.data = data - self.exists = exists - self.read_time = read_time - self.create_time = create_time - self.update_time = update_time - - def __str__(self): - return "%s-%s" % (self.reference._document_path, self.read_time) - 
- def __hash__(self): - return hash(str(self)) - - -class DummyBackgroundConsumer(object): - started = False - stopped = False - is_active = True - - def __init__(self, rpc, on_snapshot): - self._rpc = rpc - self.on_snapshot = on_snapshot - - def start(self): - self.started = True - - def stop(self): - self.stopped = True - self.is_active = False - - -class DummyThread(object): - started = False - - def __init__(self, name, target, kwargs): - self.name = name - self.target = target - self.kwargs = kwargs - - def start(self): - self.started = True - - -class DummyThreading(object): - def __init__(self): - self.threads = {} - - def Thread(self, name, target, kwargs): - thread = DummyThread(name, target, kwargs) - self.threads[name] = thread - return thread - - -class DummyRpc(object): - def __init__(self, listen, initial_request, should_recover, metadata=None): - self.listen = listen - self.initial_request = initial_request - self.should_recover = should_recover - self.closed = False - self.callbacks = [] - self._metadata = metadata - - def add_done_callback(self, callback): - self.callbacks.append(callback) - - def close(self): - self.closed = True - - -class DummyCause(object): - code = 1 - message = "hi" - - -class DummyChange(object): - def __init__(self): - self.target_ids = [] - self.removed_target_ids = [] - self.read_time = 0 - self.target_change_type = firestore.TargetChange.TargetChangeType.NO_CHANGE - self.resume_token = None - self.cause = DummyCause() - - -class DummyProto(object): - def __init__(self): - self.target_change = DummyChange() - self.document_change = DummyChange() - - -class DummyTarget(object): - def QueryTarget(self, **kw): - self.kw = kw - return DummyQueryTarget() - - -class DummyQueryTarget(object): - @property - def _pb(self): - return "dummy query target" - - -class DummyPb2(object): - - Target = DummyTarget() - - def ListenRequest(self, **kw): - pass diff --git a/tests/unit/v1beta1/testdata/create-all-transforms.textproto b/tests/unit/v1beta1/testdata/create-all-transforms.textproto deleted file mode 100644 index bbdf19e4d..000000000 --- a/tests/unit/v1beta1/testdata/create-all-transforms.textproto +++ /dev/null @@ -1,64 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can be created with any amount of transforms. 
- -description: "create: all transforms in a single call" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-multi.textproto deleted file mode 100644 index f80d65b23..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayremove-multi.textproto +++ /dev/null @@ -1,61 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayRemove field. Since all the ArrayRemove -# fields are removed, the only field in the update is "a". - -description: "create: multiple ArrayRemove fields" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-nested.textproto deleted file mode 100644 index 97756c306..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayremove-nested.textproto +++ /dev/null @@ -1,48 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayRemove value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. 
- -description: "create: nested ArrayRemove field" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-noarray-nested.textproto deleted file mode 100644 index 4ec0cb3b9..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayremove-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayRemove. Firestore transforms don't support array indexing. - -description: "create: ArrayRemove cannot be anywhere inside an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-noarray.textproto deleted file mode 100644 index 969b8d9dd..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayremove-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayRemove must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "create: ArrayRemove cannot be in an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/create-arrayremove-with-st.textproto deleted file mode 100644 index b6ea3224d..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayremove-with-st.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. 
- -description: "create: The ServerTimestamp sentinel cannot be in an ArrayUnion" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-arrayremove.textproto b/tests/unit/v1beta1/testdata/create-arrayremove.textproto deleted file mode 100644 index e8e4bb398..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayremove.textproto +++ /dev/null @@ -1,47 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayRemove is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. - -description: "create: ArrayRemove with data" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-multi.textproto deleted file mode 100644 index ec3cb72f5..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayunion-multi.textproto +++ /dev/null @@ -1,61 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayUnion field. Since all the ArrayUnion -# fields are removed, the only field in the update is "a". - -description: "create: multiple ArrayUnion fields" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - append_missing_elements: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-nested.textproto deleted file mode 100644 index e6e81bc1d..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayunion-nested.textproto +++ /dev/null @@ -1,48 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayUnion value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. - -description: "create: nested ArrayUnion field" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-noarray-nested.textproto deleted file mode 100644 index 4c0afe443..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayunion-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayUnion. Firestore transforms don't support array indexing. - -description: "create: ArrayUnion cannot be anywhere inside an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-noarray.textproto deleted file mode 100644 index 7b791fa41..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayunion-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayUnion must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "create: ArrayUnion cannot be in an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/create-arrayunion-with-st.textproto deleted file mode 100644 index a1bf4a90d..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayunion-with-st.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. 
- -description: "create: The ServerTimestamp sentinel cannot be in an ArrayUnion" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-arrayunion.textproto b/tests/unit/v1beta1/testdata/create-arrayunion.textproto deleted file mode 100644 index 98cb6ad8a..000000000 --- a/tests/unit/v1beta1/testdata/create-arrayunion.textproto +++ /dev/null @@ -1,47 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayUnion is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. - -description: "create: ArrayUnion with data" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-basic.textproto b/tests/unit/v1beta1/testdata/create-basic.textproto deleted file mode 100644 index 433ffda72..000000000 --- a/tests/unit/v1beta1/testdata/create-basic.textproto +++ /dev/null @@ -1,27 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A simple call, resulting in a single update operation. - -description: "create: basic" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-complex.textproto b/tests/unit/v1beta1/testdata/create-complex.textproto deleted file mode 100644 index 00a994e20..000000000 --- a/tests/unit/v1beta1/testdata/create-complex.textproto +++ /dev/null @@ -1,61 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A call to a write method with complicated input data. 
- -description: "create: complex" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - array_value: < - values: < - integer_value: 1 - > - values: < - double_value: 2.5 - > - > - > - > - fields: < - key: "b" - value: < - map_value: < - fields: < - key: "c" - value: < - array_value: < - values: < - string_value: "three" - > - values: < - map_value: < - fields: < - key: "d" - value: < - boolean_value: true - > - > - > - > - > - > - > - > - > - > - > - current_document: < - exists: false - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-del-noarray-nested.textproto deleted file mode 100644 index 60694e137..000000000 --- a/tests/unit/v1beta1/testdata/create-del-noarray-nested.textproto +++ /dev/null @@ -1,13 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. - -description: "create: Delete cannot be anywhere inside an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-del-noarray.textproto b/tests/unit/v1beta1/testdata/create-del-noarray.textproto deleted file mode 100644 index 5731be1c7..000000000 --- a/tests/unit/v1beta1/testdata/create-del-noarray.textproto +++ /dev/null @@ -1,13 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. - -description: "create: Delete cannot be in an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, \"Delete\"]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-empty.textproto b/tests/unit/v1beta1/testdata/create-empty.textproto deleted file mode 100644 index 2b6fec7ef..000000000 --- a/tests/unit/v1beta1/testdata/create-empty.textproto +++ /dev/null @@ -1,20 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
- - -description: "create: creating or setting an empty map" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - current_document: < - exists: false - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-nodel.textproto b/tests/unit/v1beta1/testdata/create-nodel.textproto deleted file mode 100644 index c878814b1..000000000 --- a/tests/unit/v1beta1/testdata/create-nodel.textproto +++ /dev/null @@ -1,11 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel cannot be used in Create, or in Set without a Merge option. - -description: "create: Delete cannot appear in data" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"Delete\"}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-nosplit.textproto b/tests/unit/v1beta1/testdata/create-nosplit.textproto deleted file mode 100644 index e9e1ee275..000000000 --- a/tests/unit/v1beta1/testdata/create-nosplit.textproto +++ /dev/null @@ -1,40 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Create and Set treat their map keys literally. They do not split on dots. - -description: "create: don\342\200\231t split on dots" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a.b" - value: < - map_value: < - fields: < - key: "c.d" - value: < - integer_value: 1 - > - > - > - > - > - fields: < - key: "e" - value: < - integer_value: 2 - > - > - > - current_document: < - exists: false - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-special-chars.textproto b/tests/unit/v1beta1/testdata/create-special-chars.textproto deleted file mode 100644 index 3a7acd307..000000000 --- a/tests/unit/v1beta1/testdata/create-special-chars.textproto +++ /dev/null @@ -1,41 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Create and Set treat their map keys literally. They do not escape special -# characters. - -description: "create: non-alpha characters in map keys" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "*" - value: < - map_value: < - fields: < - key: "." 
- value: < - integer_value: 1 - > - > - > - > - > - fields: < - key: "~" - value: < - integer_value: 2 - > - > - > - current_document: < - exists: false - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-st-alone.textproto b/tests/unit/v1beta1/testdata/create-st-alone.textproto deleted file mode 100644 index 9803a676b..000000000 --- a/tests/unit/v1beta1/testdata/create-st-alone.textproto +++ /dev/null @@ -1,26 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ServerTimestamps, then no update operation -# should be produced. - -description: "create: ServerTimestamp alone" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - set_to_server_value: REQUEST_TIME - > - > - current_document: < - exists: false - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-st-multi.textproto b/tests/unit/v1beta1/testdata/create-st-multi.textproto deleted file mode 100644 index cb3db4809..000000000 --- a/tests/unit/v1beta1/testdata/create-st-multi.textproto +++ /dev/null @@ -1,41 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ServerTimestamp field. Since all the -# ServerTimestamp fields are removed, the only field in the update is "a". - -description: "create: multiple ServerTimestamp fields" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c.d" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-st-nested.textproto b/tests/unit/v1beta1/testdata/create-st-nested.textproto deleted file mode 100644 index 6bc03e8e7..000000000 --- a/tests/unit/v1beta1/testdata/create-st-nested.textproto +++ /dev/null @@ -1,38 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A ServerTimestamp value can occur at any depth. In this case, the transform -# applies to the field path "b.c". Since "c" is removed from the update, "b" -# becomes empty, so it is also removed from the update. 
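The create-st* fixtures above pin down how the client strips ServerTimestamp sentinels out of the update payload and re-issues them as a companion transform write. As a rough illustration only (the project id, collection and field names are placeholders, and the call shown is the v1 surface this change keeps), the equivalent client-side write looks roughly like:

    from google.cloud import firestore

    client = firestore.Client(project="projectID")  # placeholder project id
    doc_ref = client.collection("C").document("d")

    # "b.c" is dropped from the update fields and sent as a field transform
    # (set_to_server_value: REQUEST_TIME), as the fixture comment above describes.
    doc_ref.create({"a": 1, "b": {"c": firestore.SERVER_TIMESTAMP}})
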
- -description: "create: nested ServerTimestamp field" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/create-st-noarray-nested.textproto deleted file mode 100644 index 0cec0aebd..000000000 --- a/tests/unit/v1beta1/testdata/create-st-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. - -description: "create: ServerTimestamp cannot be anywhere inside an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-st-noarray.textproto b/tests/unit/v1beta1/testdata/create-st-noarray.textproto deleted file mode 100644 index 56d91c2cf..000000000 --- a/tests/unit/v1beta1/testdata/create-st-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. Firestore transforms -# don't support array indexing. - -description: "create: ServerTimestamp cannot be in an array value" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/create-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/create-st-with-empty-map.textproto deleted file mode 100644 index 37e7e074a..000000000 --- a/tests/unit/v1beta1/testdata/create-st-with-empty-map.textproto +++ /dev/null @@ -1,45 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp -# should be stripped out but the empty map should remain. 
- -description: "create: ServerTimestamp beside an empty map" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - map_value: < - fields: < - key: "b" - value: < - map_value: < - > - > - > - > - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/create-st.textproto b/tests/unit/v1beta1/testdata/create-st.textproto deleted file mode 100644 index ddfc6a177..000000000 --- a/tests/unit/v1beta1/testdata/create-st.textproto +++ /dev/null @@ -1,39 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with the special ServerTimestamp sentinel is removed from the data in the -# update operation. Instead it appears in a separate Transform operation. Note -# that in these tests, the string "ServerTimestamp" should be replaced with the -# special ServerTimestamp value. - -description: "create: ServerTimestamp with data" -create: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - current_document: < - exists: false - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/delete-exists-precond.textproto b/tests/unit/v1beta1/testdata/delete-exists-precond.textproto deleted file mode 100644 index c9cf2ddea..000000000 --- a/tests/unit/v1beta1/testdata/delete-exists-precond.textproto +++ /dev/null @@ -1,21 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Delete supports an exists precondition. - -description: "delete: delete with exists precondition" -delete: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - precondition: < - exists: true - > - request: < - database: "projects/projectID/databases/(default)" - writes: < - delete: "projects/projectID/databases/(default)/documents/C/d" - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/delete-no-precond.textproto b/tests/unit/v1beta1/testdata/delete-no-precond.textproto deleted file mode 100644 index a396cdb8c..000000000 --- a/tests/unit/v1beta1/testdata/delete-no-precond.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ordinary Delete call. 
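The delete-* fixtures here exercise DocumentReference.delete with and without preconditions. A minimal sketch, assuming the retained v1 surface and placeholder names:

    from google.cloud import firestore

    client = firestore.Client(project="projectID")  # placeholder project id
    doc_ref = client.collection("C").document("d")

    # Ordinary delete: the write carries no precondition.
    doc_ref.delete()

    # Delete guarded by an exists precondition, as in delete-exists-precond.textproto.
    doc_ref.delete(option=client.write_option(exists=True))
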
- -description: "delete: delete without precondition" -delete: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - request: < - database: "projects/projectID/databases/(default)" - writes: < - delete: "projects/projectID/databases/(default)/documents/C/d" - > - > -> diff --git a/tests/unit/v1beta1/testdata/delete-time-precond.textproto b/tests/unit/v1beta1/testdata/delete-time-precond.textproto deleted file mode 100644 index 5798f5f3b..000000000 --- a/tests/unit/v1beta1/testdata/delete-time-precond.textproto +++ /dev/null @@ -1,25 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Delete supports a last-update-time precondition. - -description: "delete: delete with last-update-time precondition" -delete: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - precondition: < - update_time: < - seconds: 42 - > - > - request: < - database: "projects/projectID/databases/(default)" - writes: < - delete: "projects/projectID/databases/(default)/documents/C/d" - current_document: < - update_time: < - seconds: 42 - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/get-basic.textproto b/tests/unit/v1beta1/testdata/get-basic.textproto deleted file mode 100644 index 2a4481682..000000000 --- a/tests/unit/v1beta1/testdata/get-basic.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A call to DocumentRef.Get. - -description: "get: get a document" -get: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - request: < - name: "projects/projectID/databases/(default)/documents/C/d" - > -> diff --git a/tests/unit/v1beta1/testdata/listen-add-mod-del-add.textproto b/tests/unit/v1beta1/testdata/listen-add-mod-del-add.textproto deleted file mode 100644 index 1aa8dcbc3..000000000 --- a/tests/unit/v1beta1/testdata/listen-add-mod-del-add.textproto +++ /dev/null @@ -1,246 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Various changes to a single document. 
- -description: "listen: add a doc, modify it, delete it, then add it again" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - responses: < - document_delete: < - document: "projects/projectID/databases/(default)/documents/C/d1" - > - > - responses: < - target_change: < - read_time: < - seconds: 3 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 4 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - read_time: < - seconds: 1 - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - changes: < - kind: MODIFIED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - > - read_time: < - seconds: 2 - > - > - snapshots: < - changes: < - kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - new_index: -1 - > - read_time: < - seconds: 3 - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - old_index: -1 - > - read_time: < - seconds: 4 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-add-one.textproto b/tests/unit/v1beta1/testdata/listen-add-one.textproto deleted file mode 100644 index 2ad1d8e97..000000000 --- a/tests/unit/v1beta1/testdata/listen-add-one.textproto +++ /dev/null @@ -1,79 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Snapshot with a single document. - -description: "listen: add a doc" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - read_time: < - seconds: 2 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-add-three.textproto b/tests/unit/v1beta1/testdata/listen-add-three.textproto deleted file mode 100644 index ac846f762..000000000 --- a/tests/unit/v1beta1/testdata/listen-add-three.textproto +++ /dev/null @@ -1,190 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A snapshot with three documents. The documents are sorted first by the "a" -# field, then by their path. The changes are ordered the same way. - -description: "listen: add three documents" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - 
integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 2 - > - read_time: < - seconds: 2 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-doc-remove.textproto b/tests/unit/v1beta1/testdata/listen-doc-remove.textproto deleted file mode 100644 index 975200f97..000000000 --- a/tests/unit/v1beta1/testdata/listen-doc-remove.textproto +++ /dev/null @@ -1,115 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The DocumentRemove response behaves exactly like DocumentDelete. - -description: "listen: DocumentRemove behaves like DocumentDelete" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - responses: < - document_remove: < - document: "projects/projectID/databases/(default)/documents/C/d1" - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - read_time: < - seconds: 1 - > - > - snapshots: < - changes: < - kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - new_index: -1 - > - read_time: < - seconds: 2 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-empty.textproto b/tests/unit/v1beta1/testdata/listen-empty.textproto deleted file mode 100644 index 4d04b7909..000000000 --- a/tests/unit/v1beta1/testdata/listen-empty.textproto +++ /dev/null @@ -1,25 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There are no changes, so the snapshot should be empty. 
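The listen-* fixtures feed the watch logic one response at a time and assert on the snapshots it emits. A sketch of the public entry point those snapshots surface through, assuming placeholder names:

    from google.cloud import firestore

    client = firestore.Client(project="projectID")  # placeholder project id

    def callback(col_snapshot, changes, read_time):
        # Each emitted snapshot carries the current documents, the computed
        # document changes, and the read time of the snapshot.
        for change in changes:
            print(change.type.name, change.document.id)

    watch = client.collection("C").on_snapshot(callback)
    # ... later, stop receiving snapshots:
    watch.unsubscribe()
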
- -description: "listen: no changes; empty snapshot" -listen: < - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - snapshots: < - read_time: < - seconds: 1 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-filter-nop.textproto b/tests/unit/v1beta1/testdata/listen-filter-nop.textproto deleted file mode 100644 index 48fd72d3a..000000000 --- a/tests/unit/v1beta1/testdata/listen-filter-nop.textproto +++ /dev/null @@ -1,247 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Filter response whose count matches the size of the current state (docs in -# last snapshot + docs added - docs deleted) is a no-op. - -description: "listen: Filter response with same size is a no-op" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_delete: < - document: "projects/projectID/databases/(default)/documents/C/d1" - > - > - responses: < - filter: < - count: 2 - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 1 - > - read_time: < - seconds: 1 - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - 
kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: 1 - new_index: -1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 1 - > - read_time: < - seconds: 2 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-multi-docs.textproto b/tests/unit/v1beta1/testdata/listen-multi-docs.textproto deleted file mode 100644 index 8778acc3d..000000000 --- a/tests/unit/v1beta1/testdata/listen-multi-docs.textproto +++ /dev/null @@ -1,524 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Changes should be ordered with deletes first, then additions, then mods, each in -# query order. Old indices refer to the immediately previous state, not the -# previous snapshot - -description: "listen: multiple documents, added, deleted and updated" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d4" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d5" - fields: < - key: "a" - value: < - integer_value: 4 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_delete: < - document: "projects/projectID/databases/(default)/documents/C/d3" - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: -1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d6" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_delete: < - document: "projects/projectID/databases/(default)/documents/C/d2" - > - > - 
responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d4" - fields: < - key: "a" - value: < - integer_value: -2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 4 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d4" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d4" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 2 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 3 - > - read_time: < - seconds: 2 - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d4" - fields: < - key: "a" - value: < - integer_value: -2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: -1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d6" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d5" - fields: < - key: "a" - value: < - integer_value: 4 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - new_index: -1 - > - changes: < - kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - 
create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - new_index: -1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d6" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 2 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d5" - fields: < - key: "a" - value: < - integer_value: 4 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 3 - > - changes: < - kind: MODIFIED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d4" - fields: < - key: "a" - value: < - integer_value: -2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - > - changes: < - kind: MODIFIED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: -1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - old_index: 1 - new_index: 1 - > - read_time: < - seconds: 4 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-nocurrent.textproto b/tests/unit/v1beta1/testdata/listen-nocurrent.textproto deleted file mode 100644 index 24239b645..000000000 --- a/tests/unit/v1beta1/testdata/listen-nocurrent.textproto +++ /dev/null @@ -1,141 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the watch state is not marked CURRENT, no snapshot is issued. - -description: "listen: no snapshot if we don't see CURRENT" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - old_index: -1 - new_index: 1 - > - read_time: < - seconds: 2 - > - > -> diff --git 
a/tests/unit/v1beta1/testdata/listen-nomod.textproto b/tests/unit/v1beta1/testdata/listen-nomod.textproto deleted file mode 100644 index 2a99edc35..000000000 --- a/tests/unit/v1beta1/testdata/listen-nomod.textproto +++ /dev/null @@ -1,143 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Document updates are recognized by a change in the update time, not the data. -# This shouldn't actually happen. It is just a test of the update logic. - -description: "listen: add a doc, then change it but without changing its update time" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - responses: < - document_delete: < - document: "projects/projectID/databases/(default)/documents/C/d1" - > - > - responses: < - target_change: < - read_time: < - seconds: 3 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - read_time: < - seconds: 1 - > - > - snapshots: < - changes: < - kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - new_index: -1 - > - read_time: < - seconds: 3 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-removed-target-ids.textproto b/tests/unit/v1beta1/testdata/listen-removed-target-ids.textproto deleted file mode 100644 index 1e8ead2d8..000000000 --- a/tests/unit/v1beta1/testdata/listen-removed-target-ids.textproto +++ /dev/null @@ -1,131 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A DocumentChange with the watch target ID in the removed_target_ids field is the -# same as deleting a document. - -description: "listen: DocumentChange with removed_target_id is like a delete." 
-listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - removed_target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - read_time: < - seconds: 1 - > - > - snapshots: < - changes: < - kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - new_index: -1 - > - read_time: < - seconds: 2 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-reset.textproto b/tests/unit/v1beta1/testdata/listen-reset.textproto deleted file mode 100644 index 89a75df27..000000000 --- a/tests/unit/v1beta1/testdata/listen-reset.textproto +++ /dev/null @@ -1,382 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A RESET message turns off the CURRENT state, and marks all documents as deleted. - -# If a document appeared on the stream but was never part of a snapshot ("d3" in -# this test), a reset will make it disappear completely. - -# For a snapshot to happen at a NO_CHANGE reponse, we need to have both seen a -# CURRENT response, and have a change from the previous snapshot. Here, after the -# reset, we see the same version of d2 again. That doesn't result in a snapshot. 
- -description: "listen: RESET turns off CURRENT" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: RESET - > - > - responses: < - target_change: < - read_time: < - seconds: 2 - > - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 3 - > - > - > - responses: < - target_change: < - target_change_type: RESET - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - read_time: < - seconds: 4 - > - > - > - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - read_time: < - seconds: 5 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - old_index: -1 - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - new_index: 1 - > - read_time: < - seconds: 1 - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - 
create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - changes: < - kind: REMOVED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 2 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: 1 - new_index: -1 - > - changes: < - kind: MODIFIED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - > - read_time: < - seconds: 3 - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d2" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 3 - > - > - docs: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d3" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 2 - > - > - old_index: -1 - new_index: 1 - > - read_time: < - seconds: 5 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-target-add-nop.textproto b/tests/unit/v1beta1/testdata/listen-target-add-nop.textproto deleted file mode 100644 index 3fa7cce56..000000000 --- a/tests/unit/v1beta1/testdata/listen-target-add-nop.textproto +++ /dev/null @@ -1,88 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A TargetChange_ADD response must have the same watch target ID. - -description: "listen: TargetChange_ADD is a no-op if it has the same target ID" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - target_change_type: ADD - target_ids: 1 - read_time: < - seconds: 2 - > - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - snapshots: < - docs: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - changes: < - kind: ADDED - doc: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - old_index: -1 - > - read_time: < - seconds: 1 - > - > -> diff --git a/tests/unit/v1beta1/testdata/listen-target-add-wrong-id.textproto b/tests/unit/v1beta1/testdata/listen-target-add-wrong-id.textproto deleted file mode 100644 index 87544637b..000000000 --- a/tests/unit/v1beta1/testdata/listen-target-add-wrong-id.textproto +++ /dev/null @@ -1,50 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
- -# A TargetChange_ADD response must have the same watch target ID. - -description: "listen: TargetChange_ADD is an error if it has a different target ID" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - target_change_type: ADD - target_ids: 2 - read_time: < - seconds: 2 - > - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/listen-target-remove.textproto b/tests/unit/v1beta1/testdata/listen-target-remove.textproto deleted file mode 100644 index f34b0890c..000000000 --- a/tests/unit/v1beta1/testdata/listen-target-remove.textproto +++ /dev/null @@ -1,46 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A TargetChange_REMOVE response should never be sent. - -description: "listen: TargetChange_REMOVE should not appear" -listen: < - responses: < - document_change: < - document: < - name: "projects/projectID/databases/(default)/documents/C/d1" - fields: < - key: "a" - value: < - integer_value: 3 - > - > - create_time: < - seconds: 1 - > - update_time: < - seconds: 1 - > - > - target_ids: 1 - > - > - responses: < - target_change: < - target_change_type: CURRENT - > - > - responses: < - target_change: < - target_change_type: REMOVE - > - > - responses: < - target_change: < - read_time: < - seconds: 1 - > - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-arrayremove-cursor.textproto b/tests/unit/v1beta1/testdata/query-arrayremove-cursor.textproto deleted file mode 100644 index 3c926da96..000000000 --- a/tests/unit/v1beta1/testdata/query-arrayremove-cursor.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayRemove is not permitted in queries. - -description: "query: ArrayRemove in cursor method" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - end_before: < - json_values: "[\"ArrayRemove\", 1, 2, 3]" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-arrayremove-where.textproto b/tests/unit/v1beta1/testdata/query-arrayremove-where.textproto deleted file mode 100644 index 000b76350..000000000 --- a/tests/unit/v1beta1/testdata/query-arrayremove-where.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayRemove is not permitted in queries. 
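The query-array* fixtures assert that the array sentinels are write-only: they are accepted in write data but rejected when they appear in a query clause. A sketch under placeholder names:

    from google.cloud import firestore

    client = firestore.Client(project="projectID")  # placeholder project id
    doc_ref = client.collection("C").document("d")

    # Array sentinels are valid in write data ...
    doc_ref.update({"a": firestore.ArrayRemove([1, 2, 3])})
    doc_ref.update({"a": firestore.ArrayUnion([4, 5])})

    # ... but not in query clauses; per the fixtures above, the following is
    # rejected rather than sent to the backend:
    # client.collection("C").where("a", "==", firestore.ArrayRemove([1, 2, 3]))
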
- -description: "query: ArrayRemove in Where" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "==" - json_value: "[\"ArrayRemove\", 1, 2, 3]" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-arrayunion-cursor.textproto b/tests/unit/v1beta1/testdata/query-arrayunion-cursor.textproto deleted file mode 100644 index e8a61104d..000000000 --- a/tests/unit/v1beta1/testdata/query-arrayunion-cursor.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayUnion is not permitted in queries. - -description: "query: ArrayUnion in cursor method" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - end_before: < - json_values: "[\"ArrayUnion\", 1, 2, 3]" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-arrayunion-where.textproto b/tests/unit/v1beta1/testdata/query-arrayunion-where.textproto deleted file mode 100644 index 94923134e..000000000 --- a/tests/unit/v1beta1/testdata/query-arrayunion-where.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayUnion is not permitted in queries. - -description: "query: ArrayUnion in Where" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "==" - json_value: "[\"ArrayUnion\", 1, 2, 3]" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-bad-NaN.textproto b/tests/unit/v1beta1/testdata/query-bad-NaN.textproto deleted file mode 100644 index 6806dd04a..000000000 --- a/tests/unit/v1beta1/testdata/query-bad-NaN.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# You can only compare NaN for equality. - -description: "query: where clause with non-== comparison with NaN" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "<" - json_value: "\"NaN\"" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-bad-null.textproto b/tests/unit/v1beta1/testdata/query-bad-null.textproto deleted file mode 100644 index 7fdfb3f2b..000000000 --- a/tests/unit/v1beta1/testdata/query-bad-null.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# You can only compare Null for equality. 
- -description: "query: where clause with non-== comparison with Null" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: ">" - json_value: "null" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-order.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-order.textproto deleted file mode 100644 index bab8601e8..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-order.textproto +++ /dev/null @@ -1,68 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# When a document snapshot is used, the client appends a __name__ order-by clause -# with the direction of the last order-by clause. - -description: "query: cursor methods with a document snapshot, existing orderBy" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - order_by: < - path: < - field: "b" - > - direction: "desc" - > - > - clauses: < - start_after: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - order_by: < - field: < - field_path: "b" - > - direction: DESCENDING - > - order_by: < - field: < - field_path: "__name__" - > - direction: DESCENDING - > - start_at: < - values: < - integer_value: 7 - > - values: < - integer_value: 8 - > - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-orderby-name.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-orderby-name.textproto deleted file mode 100644 index d0ce3df45..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-orderby-name.textproto +++ /dev/null @@ -1,76 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If there is an existing orderBy clause on __name__, no changes are made to the -# list of orderBy clauses. 
- -description: "query: cursor method, doc snapshot, existing orderBy __name__" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "desc" - > - > - clauses: < - order_by: < - path: < - field: "__name__" - > - direction: "asc" - > - > - clauses: < - start_at: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - clauses: < - end_at: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: DESCENDING - > - order_by: < - field: < - field_path: "__name__" - > - direction: ASCENDING - > - start_at: < - values: < - integer_value: 7 - > - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D" - > - before: true - > - end_at: < - values: < - integer_value: 7 - > - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-eq.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-eq.textproto deleted file mode 100644 index 8b1e217df..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-eq.textproto +++ /dev/null @@ -1,53 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Where clause using equality doesn't change the implicit orderBy clauses. - -description: "query: cursor methods with a document snapshot and an equality where clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "==" - json_value: "3" - > - > - clauses: < - end_at: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - query: < - from: < - collection_id: "C" - > - where: < - field_filter: < - field: < - field_path: "a" - > - op: EQUAL - value: < - integer_value: 3 - > - > - > - order_by: < - field: < - field_path: "__name__" - > - direction: ASCENDING - > - end_at: < - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq-orderby.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq-orderby.textproto deleted file mode 100644 index a69edfc50..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq-orderby.textproto +++ /dev/null @@ -1,72 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If there is an OrderBy clause, the inequality Where clause does not result in a -# new OrderBy clause. 
We still add a __name__ OrderBy clause - -description: "query: cursor method, doc snapshot, inequality where clause, and existing orderBy clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "desc" - > - > - clauses: < - where: < - path: < - field: "a" - > - op: "<" - json_value: "4" - > - > - clauses: < - start_at: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - query: < - from: < - collection_id: "C" - > - where: < - field_filter: < - field: < - field_path: "a" - > - op: LESS_THAN - value: < - integer_value: 4 - > - > - > - order_by: < - field: < - field_path: "a" - > - direction: DESCENDING - > - order_by: < - field: < - field_path: "__name__" - > - direction: DESCENDING - > - start_at: < - values: < - integer_value: 7 - > - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D" - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq.textproto deleted file mode 100644 index 871dd0ba3..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-docsnap-where-neq.textproto +++ /dev/null @@ -1,64 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Where clause with an inequality results in an OrderBy clause on that clause's -# path, if there are no other OrderBy clauses. - -description: "query: cursor method with a document snapshot and an inequality where clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "<=" - json_value: "3" - > - > - clauses: < - end_before: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - query: < - from: < - collection_id: "C" - > - where: < - field_filter: < - field: < - field_path: "a" - > - op: LESS_THAN_OR_EQUAL - value: < - integer_value: 3 - > - > - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - order_by: < - field: < - field_path: "__name__" - > - direction: ASCENDING - > - end_at: < - values: < - integer_value: 7 - > - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D" - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-docsnap.textproto b/tests/unit/v1beta1/testdata/query-cursor-docsnap.textproto deleted file mode 100644 index 184bffc2d..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-docsnap.textproto +++ /dev/null @@ -1,34 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# When a document snapshot is used, the client appends a __name__ order-by clause. 
- -description: "query: cursor methods with a document snapshot" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - start_at: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "__name__" - > - direction: ASCENDING - > - start_at: < - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D" - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty-map.textproto b/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty-map.textproto deleted file mode 100644 index c197d23af..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty-map.textproto +++ /dev/null @@ -1,41 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor methods are allowed to use empty maps with EndBefore. It should result in -# an empty map in the query. - -description: "query: EndBefore with explicit empty map" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - end_before: < - json_values: "{}" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - end_at: < - values: < - map_value: < - > - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty.textproto b/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty.textproto deleted file mode 100644 index a41775abf..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-endbefore-empty.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor methods are not allowed to use empty values with EndBefore. It should -# result in an error. - -description: "query: EndBefore with empty values" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - end_before: < - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-no-order.textproto b/tests/unit/v1beta1/testdata/query-cursor-no-order.textproto deleted file mode 100644 index fb999ddab..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-no-order.textproto +++ /dev/null @@ -1,16 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If a cursor method with a list of values is provided, there must be at least as -# many explicit orderBy clauses as values. 
- -description: "query: cursor method without orderBy" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - start_at: < - json_values: "2" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-startat-empty-map.textproto b/tests/unit/v1beta1/testdata/query-cursor-startat-empty-map.textproto deleted file mode 100644 index 557aca2c9..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-startat-empty-map.textproto +++ /dev/null @@ -1,41 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor methods are allowed to use empty maps with StartAt. It should result in -# an empty map in the query. - -description: "query: StartAt with explicit empty map" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - start_at: < - json_values: "{}" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - start_at: < - values: < - map_value: < - > - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-startat-empty.textproto b/tests/unit/v1beta1/testdata/query-cursor-startat-empty.textproto deleted file mode 100644 index e0c54d98a..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-startat-empty.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor methods are not allowed to use empty values with StartAt. It should -# result in an error. - -description: "query: StartAt with empty values" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - start_at: < - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-1a.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-1a.textproto deleted file mode 100644 index bb08ab7d4..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-vals-1a.textproto +++ /dev/null @@ -1,50 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor methods take the same number of values as there are OrderBy clauses. - -description: "query: StartAt/EndBefore with values" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - start_at: < - json_values: "7" - > - > - clauses: < - end_before: < - json_values: "9" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - start_at: < - values: < - integer_value: 7 - > - before: true - > - end_at: < - values: < - integer_value: 9 - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-1b.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-1b.textproto deleted file mode 100644 index 41e69e9e6..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-vals-1b.textproto +++ /dev/null @@ -1,48 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor methods take the same number of values as there are OrderBy clauses. - -description: "query: StartAfter/EndAt with values" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - start_after: < - json_values: "7" - > - > - clauses: < - end_at: < - json_values: "9" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - start_at: < - values: < - integer_value: 7 - > - > - end_at: < - values: < - integer_value: 9 - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-2.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-2.textproto deleted file mode 100644 index 8e37ad003..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-vals-2.textproto +++ /dev/null @@ -1,71 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor methods take the same number of values as there are OrderBy clauses. - -description: "query: Start/End with two values" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - order_by: < - path: < - field: "b" - > - direction: "desc" - > - > - clauses: < - start_at: < - json_values: "7" - json_values: "8" - > - > - clauses: < - end_at: < - json_values: "9" - json_values: "10" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - order_by: < - field: < - field_path: "b" - > - direction: DESCENDING - > - start_at: < - values: < - integer_value: 7 - > - values: < - integer_value: 8 - > - before: true - > - end_at: < - values: < - integer_value: 9 - > - values: < - integer_value: 10 - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-docid.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-docid.textproto deleted file mode 100644 index 91af3486c..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-vals-docid.textproto +++ /dev/null @@ -1,50 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Cursor values corresponding to a __name__ field take the document path relative -# to the query's collection. 
- -description: "query: cursor methods with __name__" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "__name__" - > - direction: "asc" - > - > - clauses: < - start_after: < - json_values: "\"D1\"" - > - > - clauses: < - end_before: < - json_values: "\"D2\"" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "__name__" - > - direction: ASCENDING - > - start_at: < - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D1" - > - > - end_at: < - values: < - reference_value: "projects/projectID/databases/(default)/documents/C/D2" - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-cursor-vals-last-wins.textproto b/tests/unit/v1beta1/testdata/query-cursor-vals-last-wins.textproto deleted file mode 100644 index 9e8fbb19f..000000000 --- a/tests/unit/v1beta1/testdata/query-cursor-vals-last-wins.textproto +++ /dev/null @@ -1,60 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# When multiple Start* or End* calls occur, the values of the last one are used. - -description: "query: cursor methods, last one wins" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - start_after: < - json_values: "1" - > - > - clauses: < - start_at: < - json_values: "2" - > - > - clauses: < - end_at: < - json_values: "3" - > - > - clauses: < - end_before: < - json_values: "4" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "a" - > - direction: ASCENDING - > - start_at: < - values: < - integer_value: 2 - > - before: true - > - end_at: < - values: < - integer_value: 4 - > - before: true - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-del-cursor.textproto b/tests/unit/v1beta1/testdata/query-del-cursor.textproto deleted file mode 100644 index c9d4adb7c..000000000 --- a/tests/unit/v1beta1/testdata/query-del-cursor.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Sentinel values are not permitted in queries. - -description: "query: Delete in cursor method" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - end_before: < - json_values: "\"Delete\"" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-del-where.textproto b/tests/unit/v1beta1/testdata/query-del-where.textproto deleted file mode 100644 index 8e9252949..000000000 --- a/tests/unit/v1beta1/testdata/query-del-where.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Sentinel values are not permitted in queries. 
- -description: "query: Delete in Where" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "==" - json_value: "\"Delete\"" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-invalid-operator.textproto b/tests/unit/v1beta1/testdata/query-invalid-operator.textproto deleted file mode 100644 index e580c64a7..000000000 --- a/tests/unit/v1beta1/testdata/query-invalid-operator.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The != operator is not supported. - -description: "query: invalid operator in Where clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "!=" - json_value: "4" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-invalid-path-order.textproto b/tests/unit/v1beta1/testdata/query-invalid-path-order.textproto deleted file mode 100644 index e0a720576..000000000 --- a/tests/unit/v1beta1/testdata/query-invalid-path-order.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The path has an empty component. - -description: "query: invalid path in OrderBy clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "*" - field: "" - > - direction: "asc" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-invalid-path-select.textproto b/tests/unit/v1beta1/testdata/query-invalid-path-select.textproto deleted file mode 100644 index 944f984f7..000000000 --- a/tests/unit/v1beta1/testdata/query-invalid-path-select.textproto +++ /dev/null @@ -1,18 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The path has an empty component. - -description: "query: invalid path in Where clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - select: < - fields: < - field: "*" - field: "" - > - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-invalid-path-where.textproto b/tests/unit/v1beta1/testdata/query-invalid-path-where.textproto deleted file mode 100644 index 527923b09..000000000 --- a/tests/unit/v1beta1/testdata/query-invalid-path-where.textproto +++ /dev/null @@ -1,20 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The path has an empty component. - -description: "query: invalid path in Where clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "*" - field: "" - > - op: "==" - json_value: "4" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-offset-limit-last-wins.textproto b/tests/unit/v1beta1/testdata/query-offset-limit-last-wins.textproto deleted file mode 100644 index dc301f439..000000000 --- a/tests/unit/v1beta1/testdata/query-offset-limit-last-wins.textproto +++ /dev/null @@ -1,30 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# With multiple Offset or Limit clauses, the last one wins. - -description: "query: multiple Offset and Limit clauses" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - offset: 2 - > - clauses: < - limit: 3 - > - clauses: < - limit: 4 - > - clauses: < - offset: 5 - > - query: < - from: < - collection_id: "C" - > - offset: 5 - limit: < - value: 4 - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-offset-limit.textproto b/tests/unit/v1beta1/testdata/query-offset-limit.textproto deleted file mode 100644 index 136d9d46a..000000000 --- a/tests/unit/v1beta1/testdata/query-offset-limit.textproto +++ /dev/null @@ -1,24 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Offset and Limit clauses. - -description: "query: Offset and Limit clauses" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - offset: 2 - > - clauses: < - limit: 3 - > - query: < - from: < - collection_id: "C" - > - offset: 2 - limit: < - value: 3 - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-order.textproto b/tests/unit/v1beta1/testdata/query-order.textproto deleted file mode 100644 index 7ed4c4ead..000000000 --- a/tests/unit/v1beta1/testdata/query-order.textproto +++ /dev/null @@ -1,42 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Multiple OrderBy clauses combine. - -description: "query: basic OrderBy clauses" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "b" - > - direction: "asc" - > - > - clauses: < - order_by: < - path: < - field: "a" - > - direction: "desc" - > - > - query: < - from: < - collection_id: "C" - > - order_by: < - field: < - field_path: "b" - > - direction: ASCENDING - > - order_by: < - field: < - field_path: "a" - > - direction: DESCENDING - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-select-empty.textproto b/tests/unit/v1beta1/testdata/query-select-empty.textproto deleted file mode 100644 index def8b55ac..000000000 --- a/tests/unit/v1beta1/testdata/query-select-empty.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An empty Select clause selects just the document ID. - -description: "query: empty Select clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - select: < - > - > - query: < - select: < - fields: < - field_path: "__name__" - > - > - from: < - collection_id: "C" - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-select-last-wins.textproto b/tests/unit/v1beta1/testdata/query-select-last-wins.textproto deleted file mode 100644 index bd78d09eb..000000000 --- a/tests/unit/v1beta1/testdata/query-select-last-wins.textproto +++ /dev/null @@ -1,36 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The last Select clause is the only one used. 
- -description: "query: two Select clauses" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - select: < - fields: < - field: "a" - > - fields: < - field: "b" - > - > - > - clauses: < - select: < - fields: < - field: "c" - > - > - > - query: < - select: < - fields: < - field_path: "c" - > - > - from: < - collection_id: "C" - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-select.textproto b/tests/unit/v1beta1/testdata/query-select.textproto deleted file mode 100644 index 15e112497..000000000 --- a/tests/unit/v1beta1/testdata/query-select.textproto +++ /dev/null @@ -1,32 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ordinary Select clause. - -description: "query: Select clause with some fields" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - select: < - fields: < - field: "a" - > - fields: < - field: "b" - > - > - > - query: < - select: < - fields: < - field_path: "a" - > - fields: < - field_path: "b" - > - > - from: < - collection_id: "C" - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-st-cursor.textproto b/tests/unit/v1beta1/testdata/query-st-cursor.textproto deleted file mode 100644 index 66885d0dd..000000000 --- a/tests/unit/v1beta1/testdata/query-st-cursor.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Sentinel values are not permitted in queries. - -description: "query: ServerTimestamp in cursor method" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - order_by: < - path: < - field: "a" - > - direction: "asc" - > - > - clauses: < - end_before: < - json_values: "\"ServerTimestamp\"" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-st-where.textproto b/tests/unit/v1beta1/testdata/query-st-where.textproto deleted file mode 100644 index 05da28d54..000000000 --- a/tests/unit/v1beta1/testdata/query-st-where.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Sentinel values are not permitted in queries. - -description: "query: ServerTimestamp in Where" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "==" - json_value: "\"ServerTimestamp\"" - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/query-where-2.textproto b/tests/unit/v1beta1/testdata/query-where-2.textproto deleted file mode 100644 index 103446307..000000000 --- a/tests/unit/v1beta1/testdata/query-where-2.textproto +++ /dev/null @@ -1,59 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Multiple Where clauses are combined into a composite filter. 
- -description: "query: two Where clauses" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: ">=" - json_value: "5" - > - > - clauses: < - where: < - path: < - field: "b" - > - op: "<" - json_value: "\"foo\"" - > - > - query: < - from: < - collection_id: "C" - > - where: < - composite_filter: < - op: AND - filters: < - field_filter: < - field: < - field_path: "a" - > - op: GREATER_THAN_OR_EQUAL - value: < - integer_value: 5 - > - > - > - filters: < - field_filter: < - field: < - field_path: "b" - > - op: LESS_THAN - value: < - string_value: "foo" - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-where-NaN.textproto b/tests/unit/v1beta1/testdata/query-where-NaN.textproto deleted file mode 100644 index 4a97ca7dd..000000000 --- a/tests/unit/v1beta1/testdata/query-where-NaN.textproto +++ /dev/null @@ -1,31 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Where clause that tests for equality with NaN results in a unary filter. - -description: "query: a Where clause comparing to NaN" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "==" - json_value: "\"NaN\"" - > - > - query: < - from: < - collection_id: "C" - > - where: < - unary_filter: < - op: IS_NAN - field: < - field_path: "a" - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-where-null.textproto b/tests/unit/v1beta1/testdata/query-where-null.textproto deleted file mode 100644 index 1869c60c7..000000000 --- a/tests/unit/v1beta1/testdata/query-where-null.textproto +++ /dev/null @@ -1,31 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Where clause that tests for equality with null results in a unary filter. - -description: "query: a Where clause comparing to null" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: "==" - json_value: "null" - > - > - query: < - from: < - collection_id: "C" - > - where: < - unary_filter: < - op: IS_NULL - field: < - field_path: "a" - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-where.textproto b/tests/unit/v1beta1/testdata/query-where.textproto deleted file mode 100644 index 045c2befa..000000000 --- a/tests/unit/v1beta1/testdata/query-where.textproto +++ /dev/null @@ -1,34 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A simple Where clause. 
- -description: "query: Where clause" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - where: < - path: < - field: "a" - > - op: ">" - json_value: "5" - > - > - query: < - from: < - collection_id: "C" - > - where: < - field_filter: < - field: < - field_path: "a" - > - op: GREATER_THAN - value: < - integer_value: 5 - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/query-wrong-collection.textproto b/tests/unit/v1beta1/testdata/query-wrong-collection.textproto deleted file mode 100644 index ad6f353d5..000000000 --- a/tests/unit/v1beta1/testdata/query-wrong-collection.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If a document snapshot is passed to a Start*/End* method, it must be in the same -# collection as the query. - -description: "query: doc snapshot with wrong collection in cursor method" -query: < - coll_path: "projects/projectID/databases/(default)/documents/C" - clauses: < - end_before: < - doc_snapshot: < - path: "projects/projectID/databases/(default)/documents/C2/D" - json_data: "{\"a\": 7, \"b\": 8}" - > - > - > - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-all-transforms.textproto b/tests/unit/v1beta1/testdata/set-all-transforms.textproto deleted file mode 100644 index bf18f9a5b..000000000 --- a/tests/unit/v1beta1/testdata/set-all-transforms.textproto +++ /dev/null @@ -1,61 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can be created with any amount of transforms. - -description: "set: all transforms in a single call" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-multi.textproto deleted file mode 100644 index 9b62fe191..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayremove-multi.textproto +++ /dev/null @@ -1,58 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayRemove field. Since all the ArrayRemove -# fields are removed, the only field in the update is "a". 
- -description: "set: multiple ArrayRemove fields" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-nested.textproto deleted file mode 100644 index 617609c5a..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayremove-nested.textproto +++ /dev/null @@ -1,45 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayRemove value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. - -description: "set: nested ArrayRemove field" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-noarray-nested.textproto deleted file mode 100644 index 2efa34a59..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayremove-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayRemove. Firestore transforms don't support array indexing. - -description: "set: ArrayRemove cannot be anywhere inside an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-noarray.textproto deleted file mode 100644 index e7aa209ea..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayremove-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayRemove must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "set: ArrayRemove cannot be in an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/set-arrayremove-with-st.textproto deleted file mode 100644 index 353025b59..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayremove-with-st.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. - -description: "set: The ServerTimestamp sentinel cannot be in an ArrayUnion" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-arrayremove.textproto b/tests/unit/v1beta1/testdata/set-arrayremove.textproto deleted file mode 100644 index 8aa6b60d0..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayremove.textproto +++ /dev/null @@ -1,44 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayRemove is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. - -description: "set: ArrayRemove with data" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-multi.textproto deleted file mode 100644 index e515bfa8d..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayunion-multi.textproto +++ /dev/null @@ -1,58 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayUnion field. Since all the ArrayUnion -# fields are removed, the only field in the update is "a". 
- -description: "set: multiple ArrayUnion fields" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - append_missing_elements: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-nested.textproto deleted file mode 100644 index f8abeb0d0..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayunion-nested.textproto +++ /dev/null @@ -1,45 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayUnion value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. - -description: "set: nested ArrayUnion field" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-noarray-nested.textproto deleted file mode 100644 index 2b4170f43..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayunion-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayUnion. Firestore transforms don't support array indexing. - -description: "set: ArrayUnion cannot be anywhere inside an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-noarray.textproto deleted file mode 100644 index e08af3a07..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayunion-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayUnion must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "set: ArrayUnion cannot be in an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/set-arrayunion-with-st.textproto deleted file mode 100644 index 37a7a132e..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayunion-with-st.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. - -description: "set: The ServerTimestamp sentinel cannot be in an ArrayUnion" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-arrayunion.textproto b/tests/unit/v1beta1/testdata/set-arrayunion.textproto deleted file mode 100644 index 4751e0c0e..000000000 --- a/tests/unit/v1beta1/testdata/set-arrayunion.textproto +++ /dev/null @@ -1,44 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayUnion is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. - -description: "set: ArrayUnion with data" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-basic.textproto b/tests/unit/v1beta1/testdata/set-basic.textproto deleted file mode 100644 index e9b292e3c..000000000 --- a/tests/unit/v1beta1/testdata/set-basic.textproto +++ /dev/null @@ -1,24 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A simple call, resulting in a single update operation. 
- -description: "set: basic" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-complex.textproto b/tests/unit/v1beta1/testdata/set-complex.textproto deleted file mode 100644 index 6ec19500a..000000000 --- a/tests/unit/v1beta1/testdata/set-complex.textproto +++ /dev/null @@ -1,58 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A call to a write method with complicated input data. - -description: "set: complex" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - array_value: < - values: < - integer_value: 1 - > - values: < - double_value: 2.5 - > - > - > - > - fields: < - key: "b" - value: < - map_value: < - fields: < - key: "c" - value: < - array_value: < - values: < - string_value: "three" - > - values: < - map_value: < - fields: < - key: "d" - value: < - boolean_value: true - > - > - > - > - > - > - > - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-del-merge-alone.textproto b/tests/unit/v1beta1/testdata/set-del-merge-alone.textproto deleted file mode 100644 index 811ab8dfe..000000000 --- a/tests/unit/v1beta1/testdata/set-del-merge-alone.textproto +++ /dev/null @@ -1,28 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Delete sentinel can appear with a merge option. If the delete paths are the -# only ones to be merged, then no document is sent, just an update mask. - -description: "set-merge: Delete with merge" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "b" - field: "c" - > - > - json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - update_mask: < - field_paths: "b.c" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-del-merge.textproto b/tests/unit/v1beta1/testdata/set-del-merge.textproto deleted file mode 100644 index b8d863105..000000000 --- a/tests/unit/v1beta1/testdata/set-del-merge.textproto +++ /dev/null @@ -1,37 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Delete sentinel can appear with a merge option. 
- -description: "set-merge: Delete with merge" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "a" - > - fields: < - field: "b" - field: "c" - > - > - json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b.c" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-del-mergeall.textproto b/tests/unit/v1beta1/testdata/set-del-mergeall.textproto deleted file mode 100644 index af1e84524..000000000 --- a/tests/unit/v1beta1/testdata/set-del-mergeall.textproto +++ /dev/null @@ -1,31 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A Delete sentinel can appear with a mergeAll option. - -description: "set: Delete with MergeAll" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - all: true - > - json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b.c" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-del-noarray-nested.textproto deleted file mode 100644 index bbf6a3d00..000000000 --- a/tests/unit/v1beta1/testdata/set-del-noarray-nested.textproto +++ /dev/null @@ -1,13 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. - -description: "set: Delete cannot be anywhere inside an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-del-noarray.textproto b/tests/unit/v1beta1/testdata/set-del-noarray.textproto deleted file mode 100644 index 07fc6497d..000000000 --- a/tests/unit/v1beta1/testdata/set-del-noarray.textproto +++ /dev/null @@ -1,13 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. - -description: "set: Delete cannot be in an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, \"Delete\"]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-del-nomerge.textproto b/tests/unit/v1beta1/testdata/set-del-nomerge.textproto deleted file mode 100644 index cb6ef4f85..000000000 --- a/tests/unit/v1beta1/testdata/set-del-nomerge.textproto +++ /dev/null @@ -1,17 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The client signals an error if the Delete sentinel is in the input data, but not -# selected by a merge option, because this is most likely a programming bug. - -description: "set-merge: Delete cannot appear in an unmerged field" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "a" - > - > - json_data: "{\"a\": 1, \"b\": \"Delete\"}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-del-nonleaf.textproto b/tests/unit/v1beta1/testdata/set-del-nonleaf.textproto deleted file mode 100644 index 54f22d95c..000000000 --- a/tests/unit/v1beta1/testdata/set-del-nonleaf.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If a Delete is part of the value at a merge path, then the user is confused: -# their merge path says "replace this entire value" but their Delete says "delete -# this part of the value". This should be an error, just as if they specified -# Delete in a Set with no merge. - -description: "set-merge: Delete cannot appear as part of a merge path" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "h" - > - > - json_data: "{\"h\": {\"g\": \"Delete\"}}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-del-wo-merge.textproto b/tests/unit/v1beta1/testdata/set-del-wo-merge.textproto deleted file mode 100644 index 29196628b..000000000 --- a/tests/unit/v1beta1/testdata/set-del-wo-merge.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Without a merge option, Set replaces the document with the input data. A Delete -# sentinel in the data makes no sense in this case. - -description: "set: Delete cannot appear unless a merge option is specified" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"Delete\"}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-empty.textproto b/tests/unit/v1beta1/testdata/set-empty.textproto deleted file mode 100644 index c2b73d3ff..000000000 --- a/tests/unit/v1beta1/testdata/set-empty.textproto +++ /dev/null @@ -1,17 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - - -description: "set: creating or setting an empty map" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-merge-fp.textproto b/tests/unit/v1beta1/testdata/set-merge-fp.textproto deleted file mode 100644 index 68690f6f1..000000000 --- a/tests/unit/v1beta1/testdata/set-merge-fp.textproto +++ /dev/null @@ -1,40 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
- -# A merge with fields that use special characters. - -description: "set-merge: Merge with FieldPaths" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "*" - field: "~" - > - > - json_data: "{\"*\": {\"~\": true}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "*" - value: < - map_value: < - fields: < - key: "~" - value: < - boolean_value: true - > - > - > - > - > - > - update_mask: < - field_paths: "`*`.`~`" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-merge-nested.textproto b/tests/unit/v1beta1/testdata/set-merge-nested.textproto deleted file mode 100644 index 0d1282818..000000000 --- a/tests/unit/v1beta1/testdata/set-merge-nested.textproto +++ /dev/null @@ -1,41 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A merge option where the field is not at top level. Only fields mentioned in the -# option are present in the update operation. - -description: "set-merge: Merge with a nested field" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "h" - field: "g" - > - > - json_data: "{\"h\": {\"g\": 4, \"f\": 5}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "h" - value: < - map_value: < - fields: < - key: "g" - value: < - integer_value: 4 - > - > - > - > - > - > - update_mask: < - field_paths: "h.g" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-merge-nonleaf.textproto b/tests/unit/v1beta1/testdata/set-merge-nonleaf.textproto deleted file mode 100644 index ca41cb034..000000000 --- a/tests/unit/v1beta1/testdata/set-merge-nonleaf.textproto +++ /dev/null @@ -1,46 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If a field path is in a merge option, the value at that path replaces the stored -# value. That is true even if the value is complex. - -description: "set-merge: Merge field is not a leaf" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "h" - > - > - json_data: "{\"h\": {\"f\": 5, \"g\": 6}, \"e\": 7}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "h" - value: < - map_value: < - fields: < - key: "f" - value: < - integer_value: 5 - > - > - fields: < - key: "g" - value: < - integer_value: 6 - > - > - > - > - > - > - update_mask: < - field_paths: "h" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-merge-prefix.textproto b/tests/unit/v1beta1/testdata/set-merge-prefix.textproto deleted file mode 100644 index 1e2c2c502..000000000 --- a/tests/unit/v1beta1/testdata/set-merge-prefix.textproto +++ /dev/null @@ -1,21 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The prefix would make the other path meaningless, so this is probably a -# programming error. 
- -description: "set-merge: One merge path cannot be the prefix of another" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "a" - > - fields: < - field: "a" - field: "b" - > - > - json_data: "{\"a\": {\"b\": 1}}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-merge-present.textproto b/tests/unit/v1beta1/testdata/set-merge-present.textproto deleted file mode 100644 index f6665de5c..000000000 --- a/tests/unit/v1beta1/testdata/set-merge-present.textproto +++ /dev/null @@ -1,20 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The client signals an error if a merge option mentions a path that is not in the -# input data. - -description: "set-merge: Merge fields must all be present in data" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "b" - > - fields: < - field: "a" - > - > - json_data: "{\"a\": 1}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-merge.textproto b/tests/unit/v1beta1/testdata/set-merge.textproto deleted file mode 100644 index 279125253..000000000 --- a/tests/unit/v1beta1/testdata/set-merge.textproto +++ /dev/null @@ -1,32 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Fields in the input data but not in a merge option are pruned. - -description: "set-merge: Merge with a field" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "a" - > - > - json_data: "{\"a\": 1, \"b\": 2}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-mergeall-empty.textproto b/tests/unit/v1beta1/testdata/set-mergeall-empty.textproto deleted file mode 100644 index 16df8a22b..000000000 --- a/tests/unit/v1beta1/testdata/set-mergeall-empty.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# This is a valid call that can be used to ensure a document exists. - -description: "set: MergeAll can be specified with empty data." -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - all: true - > - json_data: "{}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - update_mask: < - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-mergeall-nested.textproto b/tests/unit/v1beta1/testdata/set-mergeall-nested.textproto deleted file mode 100644 index 1fbc6973c..000000000 --- a/tests/unit/v1beta1/testdata/set-mergeall-nested.textproto +++ /dev/null @@ -1,45 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# MergeAll with nested fields results in an update mask that includes entries for -# all the leaf fields. 
- -description: "set: MergeAll with nested fields" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - all: true - > - json_data: "{\"h\": { \"g\": 3, \"f\": 4 }}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "h" - value: < - map_value: < - fields: < - key: "f" - value: < - integer_value: 4 - > - > - fields: < - key: "g" - value: < - integer_value: 3 - > - > - > - > - > - > - update_mask: < - field_paths: "h.f" - field_paths: "h.g" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-mergeall.textproto b/tests/unit/v1beta1/testdata/set-mergeall.textproto deleted file mode 100644 index cb2ebc52b..000000000 --- a/tests/unit/v1beta1/testdata/set-mergeall.textproto +++ /dev/null @@ -1,37 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The MergeAll option with a simple piece of data. - -description: "set: MergeAll" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - all: true - > - json_data: "{\"a\": 1, \"b\": 2}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - fields: < - key: "b" - value: < - integer_value: 2 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-nodel.textproto b/tests/unit/v1beta1/testdata/set-nodel.textproto deleted file mode 100644 index 0fb887d46..000000000 --- a/tests/unit/v1beta1/testdata/set-nodel.textproto +++ /dev/null @@ -1,11 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel cannot be used in Create, or in Set without a Merge option. - -description: "set: Delete cannot appear in data" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"Delete\"}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-nosplit.textproto b/tests/unit/v1beta1/testdata/set-nosplit.textproto deleted file mode 100644 index 0ff3fadcf..000000000 --- a/tests/unit/v1beta1/testdata/set-nosplit.textproto +++ /dev/null @@ -1,37 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Create and Set treat their map keys literally. They do not split on dots. 
- -description: "set: don\342\200\231t split on dots" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a.b" - value: < - map_value: < - fields: < - key: "c.d" - value: < - integer_value: 1 - > - > - > - > - > - fields: < - key: "e" - value: < - integer_value: 2 - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-special-chars.textproto b/tests/unit/v1beta1/testdata/set-special-chars.textproto deleted file mode 100644 index f4122c9f0..000000000 --- a/tests/unit/v1beta1/testdata/set-special-chars.textproto +++ /dev/null @@ -1,38 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Create and Set treat their map keys literally. They do not escape special -# characters. - -description: "set: non-alpha characters in map keys" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "*" - value: < - map_value: < - fields: < - key: "." - value: < - integer_value: 1 - > - > - > - > - > - fields: < - key: "~" - value: < - integer_value: 2 - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-alone-mergeall.textproto b/tests/unit/v1beta1/testdata/set-st-alone-mergeall.textproto deleted file mode 100644 index 16ce4cfbd..000000000 --- a/tests/unit/v1beta1/testdata/set-st-alone-mergeall.textproto +++ /dev/null @@ -1,26 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ServerTimestamps, then no update operation -# should be produced. - -description: "set: ServerTimestamp alone with MergeAll" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - all: true - > - json_data: "{\"a\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-alone.textproto b/tests/unit/v1beta1/testdata/set-st-alone.textproto deleted file mode 100644 index 6ce46d7f1..000000000 --- a/tests/unit/v1beta1/testdata/set-st-alone.textproto +++ /dev/null @@ -1,28 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ServerTimestamps, then an update operation -# with an empty map should be produced. 
- -description: "set: ServerTimestamp alone" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-merge-both.textproto b/tests/unit/v1beta1/testdata/set-st-merge-both.textproto deleted file mode 100644 index 5cc7bbc9e..000000000 --- a/tests/unit/v1beta1/testdata/set-st-merge-both.textproto +++ /dev/null @@ -1,45 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Just as when no merge option is specified, ServerTimestamp sentinel values are -# removed from the data in the update operation and become transforms. - -description: "set-merge: ServerTimestamp with Merge of both fields" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "a" - > - fields: < - field: "b" - > - > - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf-alone.textproto b/tests/unit/v1beta1/testdata/set-st-merge-nonleaf-alone.textproto deleted file mode 100644 index f513b6c80..000000000 --- a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf-alone.textproto +++ /dev/null @@ -1,37 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If a field path is in a merge option, the value at that path replaces the stored -# value. If the value has only ServerTimestamps, they become transforms and we -# clear the value by including the field path in the update mask. - -description: "set-merge: non-leaf merge field with ServerTimestamp alone" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "h" - > - > - json_data: "{\"h\": {\"g\": \"ServerTimestamp\"}, \"e\": 7}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - update_mask: < - field_paths: "h" - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "h.g" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf.textproto b/tests/unit/v1beta1/testdata/set-st-merge-nonleaf.textproto deleted file mode 100644 index e53e7e268..000000000 --- a/tests/unit/v1beta1/testdata/set-st-merge-nonleaf.textproto +++ /dev/null @@ -1,49 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If a field path is in a merge option, the value at that path replaces the stored -# value, and ServerTimestamps inside that value become transforms as usual. - -description: "set-merge: non-leaf merge field with ServerTimestamp" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "h" - > - > - json_data: "{\"h\": {\"f\": 5, \"g\": \"ServerTimestamp\"}, \"e\": 7}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "h" - value: < - map_value: < - fields: < - key: "f" - value: < - integer_value: 5 - > - > - > - > - > - > - update_mask: < - field_paths: "h" - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "h.g" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-merge-nowrite.textproto b/tests/unit/v1beta1/testdata/set-st-merge-nowrite.textproto deleted file mode 100644 index 3222230dc..000000000 --- a/tests/unit/v1beta1/testdata/set-st-merge-nowrite.textproto +++ /dev/null @@ -1,28 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If all the fields in the merge option have ServerTimestamp values, then no -# update operation is produced, only a transform. - -description: "set-merge: If no ordinary values in Merge, no write" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "b" - > - > - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-mergeall.textproto b/tests/unit/v1beta1/testdata/set-st-mergeall.textproto deleted file mode 100644 index b8c53a566..000000000 --- a/tests/unit/v1beta1/testdata/set-st-mergeall.textproto +++ /dev/null @@ -1,40 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Just as when no merge option is specified, ServerTimestamp sentinel values are -# removed from the data in the update operation and become transforms. 
- -description: "set: ServerTimestamp with MergeAll" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - all: true - > - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-multi.textproto b/tests/unit/v1beta1/testdata/set-st-multi.textproto deleted file mode 100644 index 375ec18d6..000000000 --- a/tests/unit/v1beta1/testdata/set-st-multi.textproto +++ /dev/null @@ -1,38 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ServerTimestamp field. Since all the -# ServerTimestamp fields are removed, the only field in the update is "a". - -description: "set: multiple ServerTimestamp fields" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c.d" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-nested.textproto b/tests/unit/v1beta1/testdata/set-st-nested.textproto deleted file mode 100644 index abfd2e8fd..000000000 --- a/tests/unit/v1beta1/testdata/set-st-nested.textproto +++ /dev/null @@ -1,35 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A ServerTimestamp value can occur at any depth. In this case, the transform -# applies to the field path "b.c". Since "c" is removed from the update, "b" -# becomes empty, so it is also removed from the update. - -description: "set: nested ServerTimestamp field" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/set-st-noarray-nested.textproto deleted file mode 100644 index 241d79151..000000000 --- a/tests/unit/v1beta1/testdata/set-st-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. - -description: "set: ServerTimestamp cannot be anywhere inside an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-st-noarray.textproto b/tests/unit/v1beta1/testdata/set-st-noarray.textproto deleted file mode 100644 index 591fb0343..000000000 --- a/tests/unit/v1beta1/testdata/set-st-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. Firestore transforms -# don't support array indexing. - -description: "set: ServerTimestamp cannot be in an array value" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/set-st-nomerge.textproto b/tests/unit/v1beta1/testdata/set-st-nomerge.textproto deleted file mode 100644 index 20c0ae1fb..000000000 --- a/tests/unit/v1beta1/testdata/set-st-nomerge.textproto +++ /dev/null @@ -1,33 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the ServerTimestamp value is not mentioned in a merge option, then it is -# pruned from the data but does not result in a transform. - -description: "set-merge: If is ServerTimestamp not in Merge, no transform" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - option: < - fields: < - field: "a" - > - > - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/set-st-with-empty-map.textproto deleted file mode 100644 index 5e187983f..000000000 --- a/tests/unit/v1beta1/testdata/set-st-with-empty-map.textproto +++ /dev/null @@ -1,42 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp -# should be stripped out but the empty map should remain. 
- -description: "set: ServerTimestamp beside an empty map" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - map_value: < - fields: < - key: "b" - value: < - map_value: < - > - > - > - > - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/set-st.textproto b/tests/unit/v1beta1/testdata/set-st.textproto deleted file mode 100644 index 8bceddcee..000000000 --- a/tests/unit/v1beta1/testdata/set-st.textproto +++ /dev/null @@ -1,36 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with the special ServerTimestamp sentinel is removed from the data in the -# update operation. Instead it appears in a separate Transform operation. Note -# that in these tests, the string "ServerTimestamp" should be replaced with the -# special ServerTimestamp value. - -description: "set: ServerTimestamp with data" -set: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/test-suite.binproto b/tests/unit/v1beta1/testdata/test-suite.binproto deleted file mode 100644 index 6e3ce397375224cab4ee93e9ae05495a182bc983..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 55916 zcmdsA3v?V;dCtsAwtVd*$~X^u5|YWpBxWs3(b%yQ5tDdhyF{^LVkaaJf<4+DS(~hO zm7SFxV>S@Vqoh0=punL(p@s4ew1onN@@NaC6bfx9l=g6-htdO-LMi1OP7D3+Yjz&H ztJynaIcm;{w3e-V|NGtl|L=eQ*Cj8~$ogYmYs$hG%e5+v^5VQ#Zy9y6eWBU7*DJUD z3Gw0PJrfnT<<7Xi=TB^|c(d+et@We{`78d!O%oMkhuZIv&uZlCa?^8L-jszIs%7(o zn%Ypt$SK>kr>x1g*&tV@TZFOK)<@1FHz>}ynrN<}k80#yIyqOaqTHBYsCf^VuhkVt z-Nx5(_vD!6j9+ulm}SpkS*PqWzTR!O=9->okKxZ1{JGU!^xF5d+vYp9)N5|DHJ?zV z?n1ie^QO-wblqzCBO3X# zMm7gn(Vef>k6DeT<$Epm(XCtF{6g!bHSaE%Z&PIk{Z!kWO%2KQ3=&lyen=-zkSnS* z>fd?(8(NmXP^-4AMjevX^389lF5|LOmhFzS{kQD$INkC|s|}X$@X4{-CgeCRuiwoU zd57Y@pHB)P#5mhoOV*GANTG~xU{^hS(8&w+&aT!Q^{sAgVa~P6b8gcux4forRqJ#^ zt^2%_zWEQTa9j*HidZ?mjR#OXD4=*KJrs8`C~i}PVw7k_L-6#tgyBI3!%4D9a>5?3 z=CwSl?AGfIsMv)C&uvl^s4g{Cr@lz&IH445K8fA7H1fs*xk}FM)@o15rPF30VG3%N!YsWop^K0q1FdxlrcurZ73x=WaAOYt=-8t)+Hl%W( zj~3{UDpT}FL->Cf|F0q+)XC*?b7GiTC#tPE&K19@x-B>j*wH81A~uEHHXmJfWN&bG zWkQyGQYV*4%sT88g6nrw;kq&km*B@Zx3itt_TJaEd_q%LCEA*gUm8fCZEtd3cU^;=T)qjY+18auU0W%lM|BM%(G*Y*s$Hlc$z@W81%4q>=Fq)E6}qiaCX^O(XM)ht0{}m{>;N8cZc&ux z^@IW#ljkq$#Oekn7dyd-?^H!|ybqd5!Cr}Q#$@|jddFaz$Cb82e_RT+3MfHX# zstLgs1{1%{CH@v%`_X0R+NV1E6uDNIM622T0Mh?Uo!F9L1B5%`$vGC+0ab8siGthX z9FIU8`!|8<**3qj>_8uNo~;OU@~}6cF>onk8lz)&K2#uAbl-rm&hX7|sP4dlt~-!W z7u~iX_(v;v526}H>_N=*|B_C&$UTT1>(-3tS1XLj2Zqi#Co zqqXZHL^{D8|PNx$_Vgu 
z=^fuX?$#DP^O=eyikmBQc6LFLLHImUlq5x+DqShb=vtAK2+s;5;1P|COA0znl;E#0 z9}CU|NI+EhKL7bj?w-aAIp+colJNm5rPTY{{LBF7cu zWF?gw<2er8?`!1x2$cz@TY^kak<2YU$P8<>5S`!FIzIV07rM1Z-8278)d{_UQHPn^m_Tp)z|WH5S(r)iBvFG-9%)bd8bO-&Cc4 zsTi3(NMMMlYWF4LWB()fajD>#{Z&;Q`US`AcIpj2OjIqXK5?<*h2a&9WkV`lDLEKpeaa@?rJh+mES2pKnNO zMS&kx9^J!PpF^T213#`vy2P?s}*@{6dbHE@L zuF>zxN~&T6%N#U<1zKOBk*m8hXrH)Da=i51mf^kZ={P)3kvrkc1fHScfE`w$Ct4Z_&uECAg-4V8d1nS(&LqRzU6L@C$b5 zqa_Ga^k|y>;#HvMts1$uvx_WwHWU$Ef+odal{rAL3M3*Fd0mIZ1fwlMqhh4W93)Z& zB2g#A-Qw{Bm4&J7|5kMe_Y6}F68TW}_8Xb}UNpM(PpAxIn|Hu#9`knBYW5ctp`c|) z<|vQa(cA}GPUE!g=caZ89x_)Ukp&6X$_51LWRU+ds?&lQUNZJ#X~fFq@w9B=ZV|UK zL@||o1#HC|PuX)=DQabo;oCMWd=}rgTkW>}()32t5tER@-1sd0a!ffiu#sqU#`%sx z%fMy<#1Q>(3?B;QUl6IqK5w0;+dO44I5-wAPY|>}t%}01Q2^XswtgY?*6%av5ZQyB zztY%6yz45%dv2oyMs3SS3^3^&P^Iw@w()G=kzL33{TgihsnpB9%^c<%p21?ClbK(1 zpNGiHxGN%x0OND3>RYTv%Bp$ptPO@xLx64foOzjCK4o_l7BLuh$fCKOZTki3x&JP@ z{d2jo{jg;FRRqycdWns~!JO0kXhFf#+}0c}v8qroW+)QvLDM+JE9PukCdbM}+xGKP z^T8b^DmRZ7kB%RG@Tju#3&#afCO4uH#58QC+v8^*13+p-W%n{ue@h2~P-&bt5R{V| z$aX5C+>HDX)w5lZW7Y!Kn*n~xDK7ND>}@|(Q^S_o2>1%TI3X$N%`R4B)|zRckWQSO zuUB;c7=)b}^vjIA-AS7vnek(5At&67iz-aJe7ZyZ^wkC8RYSZNH}xl>MRe??U( ziQ{-_!Ce01`2{-J7R3ZhZZs>^y4w^fuB?o@-Mum-L zs?Y=3nx~c-7c8z|V$5Q{PgU5&i489gH(0_ac2e4Kl+&_zoZtf#W*+1xcpi2PHC%_Y zWw67B2GC#C$yA8jQUh)AE8!y<;fAZGV;}Z|`Q7Oc$Xy&}#oVwwK>ZI* zP|Tpb{qeef*g`ba-h2Ijt-5|@KBrpx0P2}usp!5d$_V*7^8K+~aYcqhgu2^MLWEu; zUbzapr^2-}@18>2OpM#Vr-}e&dzu(pNzK|*=2^;1Ll=|q@ykwGVaZwZi$2H0kUn2QS#N7>=3_+dq%3?=HfQj6s@J(yjT8=wwN5&FPq z`60Bql1mXu`$g4qMYW{;N7Gv*FVdvfmM#8_W$+`f{TvAz=hq@wLg`%YndxkwNX-ui zBB*3$3FMZXA6*?cQj342x<(;FL2IKyic&hfu6oG+8}jt!dTOrU+RFd1wS^W(RBewG=?%w4J7;W)Tc2#c`_bSe@7P@tu^+KDZley$PIhj!BS4DQ~kzPhi?g z$>}kLlW7VJ%qC!~U{lN`SFx>lSaB7I_)yyA=|h*oTE?-Nd~%t@D{9@quHq4h;?or8 z#i87pHxOKkAUrv8(!CE<{T7UZkPK1Vsai(?akWxZ?KwdW)vyUi3;*PlF+YXSOsZbd zJ|5A*3jegMg?~TN$OgH_P+AA`4ir^yy7HFStNhbGnOaXBq$h5cPF8NL^UaTOw~s7X z#Pa!yl`EzYDWZJatIE2)tV@n2-;uYVou6Ft62(i!pDH?p%+p-F1k)#NfF(yWb-AjT zXo-Z}p}3|As!WQ?B@#=!$p^@s&J*5DK^^L6rX>;*xRCnlV5VFmk+!G_83kv8F>|eM zq9qa$6opRgz^Ggzk-ETXMdBD!w7_wb#IawoQ}IozS~dn}?_#F`N_O(S#%1%r^TH9fm`HW#BUB=%BYIUhFtnG zs!ye7{lgtEYlf_f)6*AFqN-R@Rb0N3D%uw?E49m5RfWjtHc4)rW&4!w6i;Vi75V~7 zk`f+G-tp1xbSNxl>@L%-wu<%zloW){w0?NFis#$_dis@YjTRGyT}T`_MMW*1WzqN+i9w^qJ04+NnM$239^=_)Qa{6j8K_ix+Tb5rfRDn zXygJ(XXK3@b-(#K7CRcJ&2}f-_QUB9(xc{TGK{bjewBvj6%~|$+8USD)*#P*M%G-E zk*lbYx9FWihG!YJ@EDJUw_2L)ttI+C4-#6?hV%Z#1|ByUv%Ck&-a-qhBo#c`Q*Jc9 zqHwH5QNpdmO!l>AOXE&Z%~?h%_v7JW>X@yu98WNWR{B&79;w~ zl}>)9kta!|5kI=PyEP2Y=Tzajq@zfZS-JN8sbTpwii5LTZIg^OLMK6exSgn*2-9|6 z^eXOA8{Z^Y<%6^YlST2rh<6A%v|CvQf@o&yD5sCGY$t89cxE!B5EBx|i(HP#lX8^3 z$8ptw_$YwMoo3OD>Ae7bH)qK2{_X z3OPGL*fgi#JdW}_OrlYfKr?Nz7(UN@uk~zm4k^MFnx|qTw}>BBe7X}dJy0@y;+Du` zUyfpGItLLskaSY1wA#S4MaH80)<2)hQ9Pbm>8y}5%=%qTdX43JXQzMr~;SF82`4aJNt&;BIOT4H@T2=83W>x){CL^|TbJ?QB&7H6MZ!@iW zL$B1TWnFgEB~RX7kXJsr>?QK6oS&%1M3^SQml?_gW%{K;6<3nq?Oyz0Cm@v1CDDs{ z+KW=2=|9W#V(Q0&5Ut#$xaJ8GU7S~B-k&3do z0pizrGH9+@R)3|(55TaF>_GXuxj>l^cDPE|qb4g8%b4N?mLgyyxw2orm80A|GSIqI zs?o-x`!i_86`gJMH9c;K_Q<(%-kS?&q6C`4*u#ox7gug-P%PCP9g{`LLX1hna?kRv z9M!f5%p#;;*ewQ0=pkge@fCi!b_GKllm8zCzIF+1kP#F=+Zn*VgAV@cHaA0fu7%vk;&iG6n?D|C}Gtz03g{DuluvQz6ovZ5LN+#!pm% zh>V}uL#!_^$OuA$^%HuP9`JIzMMOCPW677wfO%a3+n?K*&9VxF&E33&7k}3t72+~#vEf?j6-(YJ@9co znj6K(vQWIai?5^?$+Xu&6y(_2Lj4XB$v%!^uLG)6TzjKTgo@2*Qz(BOy|<%uCETM) zat>xeZB&xOCt_6mHhfl5DnbWdpX4m(NES*gK}nZwTnl6+ja%GYrop#dR zp^<%2(qbDNVyXVAJkGPS!1x3Tqi}!6D6rd<$B^E(D@tgB$CjW^F_DDx|M?;M1kW+D z-l>t@QL?%+-4dkDWP$FA4rxj4C*DQ{fN17l>OyIP36~(XoQ2d)2~qW2GHcKoHC(D(X?DAJEGgOPd0?L 
z#F}}^7H!cONnvxor@DT$c*a6VVHWvG$j_*cH(;pmY!_db-o;#_DO*P|3Np4Z3{Tlo zZT52+qsQ3bTuAV)CWe!lN}voGIjafMwnSZ&&y{N3a9y zoy)%;rPoKz84Qf|WQ4WNeb&*VK?B`P=}E})E6NVqXrc>7LBi5emY#$bJfdfb1fwGp zKJN)u0q3+l2N54NM8F`kjRyLcxlQad`@D*eNx|u4S0E!gZA_wSUO-l3&~c1O2>l=i zq~fI5$HSo9(r8F-!N?4$Bk|x~JjSltkiir|_2Y4Odu0*1gk|)63acm8_8O5}oy)n= z*0w>Fe?CZ8mOI~VV$K3Id9l;Q)puT|$_p`adfxX_M7iW;6HCF>=L*gMQx^Zu$ZjmQ zA}vFvU_z;}@X^$qxx-MJf&8OJCc7-b>YEcYrQD*A_Ys;+y{zkP_8W?eL!#bQ-4^F1 z+qz8QX%Hm@S@OXE9S5mGRb0+}#JqpeYo464xGp#h1zB!c<(j+bd!+-!S!g!y#ZUYR z@!{q@6SSq`jO%;;#O8`O>n_$>PnxJS%8S$t@h5IFbaIxto~%O)>5(Hx>h8RE{UGFKBT{=RZz1iuqe7 z8ff(#TB=`qM)o=Jss&6T8sT0?&^^%5%6#4hvR=%4CwlC85?!?!&o9r3L%ZG_wsG9p z=2lM3m1GkbG9YP)lEnWxwqg|h=pNSC(fLBilAzMH!ZcbmThwh<>&K9KZq*lSHO1`_ zG~%g|UAxR7I*>WaX|AUjNk&dI2e z%LEk%XJ^q?(sJt+YkzgVD#g(+-K;ddZs~@;xnQnkD=+FJg{yVaCY!}C@J=s5(_INFNrGwwbusLah{7|!(2`Bz zOHa)K7%bnSnV`0qEx!8P=bNVU!IIh(UEV}&VsvI(^!(C74oe(%zvMEkkiBV?8C>sJrt4s9j8iIs@2tQDQ zIDgV(RgeVNtcvX*1XxIl5a1bu39vEZtfJs)*BWa|tAU@PPhKs~94X8~s%J*^DMCrc z|04htg-#$`A0@Q5M?zX&Z2Bl}n)h0Bjf%J+N`s~J?`rg~r*;Pk**#R?N2XR8L-w9Y z+ZdTz{Q)#%2|-sXmL~2^6Tz` zKi6oL9#h-P{f_EOse26Z%-T*`(FGsUwQcjZ?)yDOb`(`E33~v=mvwUEU{KJ)Mzo)# zh814*pa*UVE~MNAb^lbJ$W^1dNFLz29hDt@>P9K?s+f|`=;W5c5M@>xnvQ$-FQR`Z z&chR76Ce51D&TfZZJ4bPjs2xO@q9MJvvXw_hJ9X$fvJ`7q{gBh6tCwfPUnf@d<;dMp~#F&7;_0YpTc-k5mLzd4|Q@^H&Dl@z6(^{82ZR| z#c+&a92b6h6Mdx`z)|~}U-;-X2NyzZyOFQ@jKhs*8AgZXsh?B@S>b-HKz5~pJAr6m zKs-`V68#vaUG0fnk4aW@@CHN$sjOYMdixVr~q! zv_|02i!UvDoma(@`q9|p*u@zu^tv0V>|~)Xa%t4;QUa6PqnPGo;EIWb-UB8nJzZ&r zON~o9o}gr408<|$7YeI(Oc{V)9a0{ zd)*?JCiXfy{h=JDUpI{#rYxbv@?W9_4wmT7N2hjknEK-BjYc0f!1K%UE*|59C@Fb3 z-7c9njebn==Ng$3n-MmsxXpT^*}#pd+^Err0ekGIZbeDN%S|eV{(2aw*PuvZh6$>6eCCQmdo(C z&as`+=}vJD(aCl8PDKU~LlknM4KY4OTcJbS(2C(7&3IFGP7ij+*pDb)R!$E_qwHeB#!m{Wc0h7;)sd15R~;#^ ze1bj6!obNDh8Wm*hPu&{TySV$OUp8J5`pm0^ytyaM~282DHkwUrG;u$XRQ++#Z*h6c9iY&v<&AUh=I1c^K{0_wx-U#ZY`rraRMWd@OYelAz0-)Lmn>tNM^VAH+M zpc|7Sm~p@WzF=Q64!B)vZ^_=tTU-_Zu93sDd#Jo5DqlIak-N;NqnWPag%#aD(VBiV zV!Tq*i`_XgZ1WDe$_U>muhPk~%I&nrDP83++YRpm>5l2t<&oh$<ekkyd^2zd8gPbIE5Eq!kJk!l5fG9G27@|zW2jVM-4>^yA z4fUH(K(;g^vfrTsrpxun)LsAl|J*_I06l-pDQt;&Z%QjVcSI%i-O z2X&HA9Qy_1r#Y@%^*~LsdY1i8juzkVVe%d1Q%Q}yN2K_QTKySs&OPq=R;_xUXUV_g z=@ZlM$v5PZxbmR`iIXuXluPVS^0JmipaWhGMhoo&crPypaWH&HP?$U8ts`VoZt}ir zjXu)S^!$ZJ-4_)wKFSF#zxo`{s-pTI(cP_$h3W6-6HyGP%IGHu#8#^7E=-f*j0}V& zGQ9E)dXfwk>Ej+E-jU}qrL3ote3*r+llu|k>MUw2NoHtZaZG(l`K(Toomp829Fxm| z7syNWz{_w=NwRhvP@TMJh}Md8^^uM5ft(r^IAu$$UG1y12Q>S zUc&hFr%(H#kU_qaT;+695b}s62K$ypsFGBMwrV+!if^L$Me@PzY_B;N9NE2RPv~%< zF%(3p=UktrqFC(w;H+WYyl47#`K0v(h|-gTUnkZYYj9j@8ZwKdZq-)K=8+F;WKUmo zQSs7j(yZwKUWiw9b{6&RXhzgQxSq=$xi`Ri7~l=8nsaMc!Vm2|aPaASEfj<;)+#U@ XkdCyl;JMA|W2cKk$_T;UZ_xe^@qaSD diff --git a/tests/unit/v1beta1/testdata/update-all-transforms.textproto b/tests/unit/v1beta1/testdata/update-all-transforms.textproto deleted file mode 100644 index 225cc61e4..000000000 --- a/tests/unit/v1beta1/testdata/update-all-transforms.textproto +++ /dev/null @@ -1,67 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can be created with any amount of transforms. 
- -description: "update: all transforms in a single call" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": [\"ArrayUnion\", 1, 2, 3], \"d\": [\"ArrayRemove\", 4, 5, 6]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-alone.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-alone.textproto deleted file mode 100644 index 8c79a31d5..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayremove-alone.textproto +++ /dev/null @@ -1,36 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ArrayRemove, then no update operation should -# be produced. - -description: "update: ArrayRemove alone" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayRemove\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-multi.textproto deleted file mode 100644 index 2362b6e09..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayremove-multi.textproto +++ /dev/null @@ -1,69 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayRemove field. Since all the ArrayRemove -# fields are removed, the only field in the update is "a". - -# b is not in the mask because it will be set in the transform. c must be in the -# mask: it should be replaced entirely. The transform will set c.d to the -# timestamp, but the update will delete the rest of c. 
- -description: "update: multiple ArrayRemove fields" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3], \"c\": {\"d\": [\"ArrayRemove\", 4, 5, 6]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "c" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-nested.textproto deleted file mode 100644 index 143790179..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayremove-nested.textproto +++ /dev/null @@ -1,52 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayRemove value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. - -description: "update: nested ArrayRemove field" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayRemove\", 1, 2, 3]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-noarray-nested.textproto deleted file mode 100644 index 04eca965c..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayremove-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayRemove. Firestore transforms don't support array indexing. 
- -description: "update: ArrayRemove cannot be anywhere inside an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-noarray.textproto deleted file mode 100644 index bbd27bf01..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayremove-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayRemove must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "update: ArrayRemove cannot be in an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/update-arrayremove-with-st.textproto deleted file mode 100644 index 4888b44f1..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayremove-with-st.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. - -description: "update: The ServerTimestamp sentinel cannot be in an ArrayUnion" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayRemove\", 1, \"ServerTimestamp\", 3]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-arrayremove.textproto b/tests/unit/v1beta1/testdata/update-arrayremove.textproto deleted file mode 100644 index 3b767cf48..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayremove.textproto +++ /dev/null @@ -1,50 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayRemove is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. - -description: "update: ArrayRemove with data" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayRemove\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-alone.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-alone.textproto deleted file mode 100644 index ec12818da..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayunion-alone.textproto +++ /dev/null @@ -1,36 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ArrayUnion, then no update operation should -# be produced. - -description: "update: ArrayUnion alone" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayUnion\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-multi.textproto deleted file mode 100644 index 8edf6a3af..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayunion-multi.textproto +++ /dev/null @@ -1,69 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayUnion field. Since all the ArrayUnion -# fields are removed, the only field in the update is "a". - -# b is not in the mask because it will be set in the transform. c must be in the -# mask: it should be replaced entirely. The transform will set c.d to the -# timestamp, but the update will delete the rest of c. - -description: "update: multiple ArrayUnion fields" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3], \"c\": {\"d\": [\"ArrayUnion\", 4, 5, 6]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "c" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - append_missing_elements: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-nested.textproto deleted file mode 100644 index 217e2e2ca..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayunion-nested.textproto +++ /dev/null @@ -1,52 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayUnion value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. 
- -description: "update: nested ArrayUnion field" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": [\"ArrayUnion\", 1, 2, 3]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-noarray-nested.textproto deleted file mode 100644 index 032678183..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayunion-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayUnion. Firestore transforms don't support array indexing. - -description: "update: ArrayUnion cannot be anywhere inside an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-noarray.textproto deleted file mode 100644 index c199f9f73..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayunion-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayUnion must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "update: ArrayUnion cannot be in an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, [\"ArrayRemove\", 1, 2, 3]]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/update-arrayunion-with-st.textproto deleted file mode 100644 index ee022f849..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayunion-with-st.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. 
- -description: "update: The ServerTimestamp sentinel cannot be in an ArrayUnion" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [\"ArrayUnion\", 1, \"ServerTimestamp\", 3]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-arrayunion.textproto b/tests/unit/v1beta1/testdata/update-arrayunion.textproto deleted file mode 100644 index 81b240b89..000000000 --- a/tests/unit/v1beta1/testdata/update-arrayunion.textproto +++ /dev/null @@ -1,50 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayUnion is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. - -description: "update: ArrayUnion with data" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": [\"ArrayUnion\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-badchar.textproto b/tests/unit/v1beta1/testdata/update-badchar.textproto deleted file mode 100644 index 656ff53b6..000000000 --- a/tests/unit/v1beta1/testdata/update-badchar.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The keys of the data given to Update are interpreted, unlike those of Create and -# Set. They cannot contain special characters. - -description: "update: invalid character" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a~b\": 1}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-basic.textproto b/tests/unit/v1beta1/testdata/update-basic.textproto deleted file mode 100644 index 9da316f58..000000000 --- a/tests/unit/v1beta1/testdata/update-basic.textproto +++ /dev/null @@ -1,30 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A simple call, resulting in a single update operation. 
- -description: "update: basic" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-complex.textproto b/tests/unit/v1beta1/testdata/update-complex.textproto deleted file mode 100644 index 1a6d9eff6..000000000 --- a/tests/unit/v1beta1/testdata/update-complex.textproto +++ /dev/null @@ -1,65 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A call to a write method with complicated input data. - -description: "update: complex" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - array_value: < - values: < - integer_value: 1 - > - values: < - double_value: 2.5 - > - > - > - > - fields: < - key: "b" - value: < - map_value: < - fields: < - key: "c" - value: < - array_value: < - values: < - string_value: "three" - > - values: < - map_value: < - fields: < - key: "d" - value: < - boolean_value: true - > - > - > - > - > - > - > - > - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-del-alone.textproto b/tests/unit/v1beta1/testdata/update-del-alone.textproto deleted file mode 100644 index 8f558233f..000000000 --- a/tests/unit/v1beta1/testdata/update-del-alone.textproto +++ /dev/null @@ -1,25 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the input data consists solely of Deletes, then the update operation has no -# map, just an update mask. - -description: "update: Delete alone" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": \"Delete\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-del-dot.textproto b/tests/unit/v1beta1/testdata/update-del-dot.textproto deleted file mode 100644 index c0ebdf61f..000000000 --- a/tests/unit/v1beta1/testdata/update-del-dot.textproto +++ /dev/null @@ -1,46 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# After expanding top-level dotted fields, fields with Delete values are pruned -# from the output data, but appear in the update mask. 
- -description: "update: Delete with a dotted field" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b.c\": \"Delete\", \"b.d\": 2}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - fields: < - key: "b" - value: < - map_value: < - fields: < - key: "d" - value: < - integer_value: 2 - > - > - > - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b.c" - field_paths: "b.d" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-del-nested.textproto b/tests/unit/v1beta1/testdata/update-del-nested.textproto deleted file mode 100644 index ed102697e..000000000 --- a/tests/unit/v1beta1/testdata/update-del-nested.textproto +++ /dev/null @@ -1,11 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a top-level key. - -description: "update: Delete cannot be nested" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": {\"b\": \"Delete\"}}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-del-noarray-nested.textproto deleted file mode 100644 index a2eec4966..000000000 --- a/tests/unit/v1beta1/testdata/update-del-noarray-nested.textproto +++ /dev/null @@ -1,13 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. - -description: "update: Delete cannot be anywhere inside an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-del-noarray.textproto b/tests/unit/v1beta1/testdata/update-del-noarray.textproto deleted file mode 100644 index a7eea87ef..000000000 --- a/tests/unit/v1beta1/testdata/update-del-noarray.textproto +++ /dev/null @@ -1,13 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. - -description: "update: Delete cannot be in an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, \"Delete\"]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-del.textproto b/tests/unit/v1beta1/testdata/update-del.textproto deleted file mode 100644 index ec443e6c7..000000000 --- a/tests/unit/v1beta1/testdata/update-del.textproto +++ /dev/null @@ -1,32 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
- -# If a field's value is the Delete sentinel, then it doesn't appear in the update -# data, but does in the mask. - -description: "update: Delete" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"Delete\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-exists-precond.textproto b/tests/unit/v1beta1/testdata/update-exists-precond.textproto deleted file mode 100644 index 3c6fef4e2..000000000 --- a/tests/unit/v1beta1/testdata/update-exists-precond.textproto +++ /dev/null @@ -1,14 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Update method does not support an explicit exists precondition. - -description: "update: Exists precondition is invalid" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - precondition: < - exists: true - > - json_data: "{\"a\": 1}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-fp-empty-component.textproto b/tests/unit/v1beta1/testdata/update-fp-empty-component.textproto deleted file mode 100644 index c3bceff3e..000000000 --- a/tests/unit/v1beta1/testdata/update-fp-empty-component.textproto +++ /dev/null @@ -1,11 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Empty fields are not allowed. - -description: "update: empty field path component" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a..b\": 1}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-nested-transform-and-nested-value.textproto b/tests/unit/v1beta1/testdata/update-nested-transform-and-nested-value.textproto deleted file mode 100644 index d2cee270d..000000000 --- a/tests/unit/v1beta1/testdata/update-nested-transform-and-nested-value.textproto +++ /dev/null @@ -1,58 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# For updates, top-level paths in json-like map inputs are split on the dot. That -# is, an input {"a.b.c": 7} results in an update to field c of object b of object -# a with value 7. In order to specify this behavior, the update must use a -# fieldmask "a.b.c". However, fieldmasks are only used for concrete values - -# transforms are separately encoded in a DocumentTransform_FieldTransform array. - -# This test exercises a bug found in python -# (https://github.com/googleapis/google-cloud-python/issues/7215) in which nested -# transforms ({"a.c": "ServerTimestamp"}) next to nested values ({"a.b": 7}) -# incorrectly caused the fieldmask "a" to be set, which has the effect of wiping -# out all data in "a" other than what was specified in the json-like input. - -# Instead, as this test specifies, transforms should not affect the fieldmask. - -description: "update: Nested transforms should not affect the field mask, even\nwhen there are other values that do. 
Transforms should only affect the\nDocumentTransform_FieldTransform list." -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a.b\": 7, \"a.c\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - map_value: < - fields: < - key: "b" - value: < - integer_value: 7 - > - > - > - > - > - > - update_mask: < - field_paths: "a.b" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-no-paths.textproto b/tests/unit/v1beta1/testdata/update-no-paths.textproto deleted file mode 100644 index b524b7483..000000000 --- a/tests/unit/v1beta1/testdata/update-no-paths.textproto +++ /dev/null @@ -1,11 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# It is a client-side error to call Update with empty data. - -description: "update: no paths" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-all-transforms.textproto b/tests/unit/v1beta1/testdata/update-paths-all-transforms.textproto deleted file mode 100644 index 8cfad4732..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-all-transforms.textproto +++ /dev/null @@ -1,82 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can be created with any amount of transforms. 
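For the all-transforms case below, only the plain value reaches the update write; the three sentinels travel in a separate transform write. A rough equivalent with the v1 client's public helpers (SERVER_TIMESTAMP, ArrayUnion, ArrayRemove), under the same assumptions as the earlier snippet:

    from google.cloud import firestore

    client = firestore.Client(project="projectID")  # assumed project ID
    doc_ref = client.collection("C").document("d")

    # "a" is the only concrete value, so the update mask contains only "a";
    # b, c and d become field transforms in a second write.
    doc_ref.update({
        "a": 1,
        "b": firestore.SERVER_TIMESTAMP,
        "c": firestore.ArrayUnion([1, 2, 3]),
        "d": firestore.ArrayRemove([4, 5, 6]),
    })
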
- -description: "update-paths: all transforms in a single call" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - field_paths: < - field: "c" - > - field_paths: < - field: "d" - > - json_values: "1" - json_values: "\"ServerTimestamp\"" - json_values: "[\"ArrayUnion\", 1, 2, 3]" - json_values: "[\"ArrayRemove\", 4, 5, 6]" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-alone.textproto deleted file mode 100644 index 68f0e147b..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-alone.textproto +++ /dev/null @@ -1,39 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ArrayRemove, then no update operation should -# be produced. - -description: "update-paths: ArrayRemove alone" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[\"ArrayRemove\", 1, 2, 3]" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-multi.textproto deleted file mode 100644 index b60c3f36a..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-multi.textproto +++ /dev/null @@ -1,76 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayRemove field. Since all the ArrayRemove -# fields are removed, the only field in the update is "a". 
- -description: "update-paths: multiple ArrayRemove fields" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - field_paths: < - field: "c" - > - json_values: "1" - json_values: "[\"ArrayRemove\", 1, 2, 3]" - json_values: "{\"d\": [\"ArrayRemove\", 4, 5, 6]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "c" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - remove_all_from_array: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-nested.textproto deleted file mode 100644 index 381be19d5..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-nested.textproto +++ /dev/null @@ -1,59 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayRemove value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. - -description: "update-paths: nested ArrayRemove field" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "1" - json_values: "{\"c\": [\"ArrayRemove\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray-nested.textproto deleted file mode 100644 index 35f6c67b2..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray-nested.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayRemove. Firestore transforms don't support array indexing. 
- -description: "update-paths: ArrayRemove cannot be anywhere inside an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, {\"b\": [\"ArrayRemove\", 1, 2, 3]}]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray.textproto deleted file mode 100644 index 45cab48dd..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-noarray.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayRemove must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "update-paths: ArrayRemove cannot be in an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, 2, [\"ArrayRemove\", 1, 2, 3]]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove-with-st.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove-with-st.textproto deleted file mode 100644 index 67b92a3ef..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayremove-with-st.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. - -description: "update-paths: The ServerTimestamp sentinel cannot be in an ArrayUnion" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[\"ArrayRemove\", 1, \"ServerTimestamp\", 3]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayremove.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayremove.textproto deleted file mode 100644 index d3866676e..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayremove.textproto +++ /dev/null @@ -1,57 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayRemove is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. 
- -description: "update-paths: ArrayRemove with data" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "1" - json_values: "[\"ArrayRemove\", 1, 2, 3]" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - remove_all_from_array: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-alone.textproto deleted file mode 100644 index 48100e0ab..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-alone.textproto +++ /dev/null @@ -1,39 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ArrayUnion, then no update operation should -# be produced. - -description: "update-paths: ArrayUnion alone" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[\"ArrayUnion\", 1, 2, 3]" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-multi.textproto deleted file mode 100644 index 03772e5dd..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-multi.textproto +++ /dev/null @@ -1,76 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ArrayUnion field. Since all the ArrayUnion -# fields are removed, the only field in the update is "a". 
- -description: "update-paths: multiple ArrayUnion fields" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - field_paths: < - field: "c" - > - json_values: "1" - json_values: "[\"ArrayUnion\", 1, 2, 3]" - json_values: "{\"d\": [\"ArrayUnion\", 4, 5, 6]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "c" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - field_transforms: < - field_path: "c.d" - append_missing_elements: < - values: < - integer_value: 4 - > - values: < - integer_value: 5 - > - values: < - integer_value: 6 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-nested.textproto deleted file mode 100644 index 1420e4e28..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-nested.textproto +++ /dev/null @@ -1,59 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# An ArrayUnion value can occur at any depth. In this case, the transform applies -# to the field path "b.c". Since "c" is removed from the update, "b" becomes -# empty, so it is also removed from the update. - -description: "update-paths: nested ArrayUnion field" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "1" - json_values: "{\"c\": [\"ArrayUnion\", 1, 2, 3]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray-nested.textproto deleted file mode 100644 index ab75bf38a..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray-nested.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ArrayUnion. Firestore transforms don't support array indexing. 
- -description: "update-paths: ArrayUnion cannot be anywhere inside an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, {\"b\": [\"ArrayUnion\", 1, 2, 3]}]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray.textproto deleted file mode 100644 index fac72644f..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-noarray.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# ArrayUnion must be the value of a field. Firestore transforms don't support -# array indexing. - -description: "update-paths: ArrayUnion cannot be in an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, 2, [\"ArrayRemove\", 1, 2, 3]]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion-with-st.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion-with-st.textproto deleted file mode 100644 index d194c09bd..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayunion-with-st.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. It may not appear in -# an ArrayUnion. - -description: "update-paths: The ServerTimestamp sentinel cannot be in an ArrayUnion" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[\"ArrayUnion\", 1, \"ServerTimestamp\", 3]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-arrayunion.textproto b/tests/unit/v1beta1/testdata/update-paths-arrayunion.textproto deleted file mode 100644 index fc56c1e29..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-arrayunion.textproto +++ /dev/null @@ -1,57 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with ArrayUnion is removed from the data in the update operation. Instead -# it appears in a separate Transform operation. 
- -description: "update-paths: ArrayUnion with data" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "1" - json_values: "[\"ArrayUnion\", 1, 2, 3]" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - append_missing_elements: < - values: < - integer_value: 1 - > - values: < - integer_value: 2 - > - values: < - integer_value: 3 - > - > - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-basic.textproto b/tests/unit/v1beta1/testdata/update-paths-basic.textproto deleted file mode 100644 index 515f29d6a..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-basic.textproto +++ /dev/null @@ -1,33 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A simple call, resulting in a single update operation. - -description: "update-paths: basic" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "1" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-complex.textproto b/tests/unit/v1beta1/testdata/update-paths-complex.textproto deleted file mode 100644 index 38a832239..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-complex.textproto +++ /dev/null @@ -1,72 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A call to a write method with complicated input data. 
- -description: "update-paths: complex" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "[1, 2.5]" - json_values: "{\"c\": [\"three\", {\"d\": true}]}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - array_value: < - values: < - integer_value: 1 - > - values: < - double_value: 2.5 - > - > - > - > - fields: < - key: "b" - value: < - map_value: < - fields: < - key: "c" - value: < - array_value: < - values: < - string_value: "three" - > - values: < - map_value: < - fields: < - key: "d" - value: < - boolean_value: true - > - > - > - > - > - > - > - > - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-del-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-del-alone.textproto deleted file mode 100644 index 5dbb787de..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-del-alone.textproto +++ /dev/null @@ -1,28 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the input data consists solely of Deletes, then the update operation has no -# map, just an update mask. - -description: "update-paths: Delete alone" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "\"Delete\"" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-del-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-del-nested.textproto deleted file mode 100644 index bdf65fb0a..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-del-nested.textproto +++ /dev/null @@ -1,14 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a top-level key. - -description: "update-paths: Delete cannot be nested" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "{\"b\": \"Delete\"}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-del-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-del-noarray-nested.textproto deleted file mode 100644 index d3da15dda..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-del-noarray-nested.textproto +++ /dev/null @@ -1,16 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. 
- -description: "update-paths: Delete cannot be anywhere inside an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, {\"b\": \"Delete\"}]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-del-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-del-noarray.textproto deleted file mode 100644 index 9ebdd0945..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-del-noarray.textproto +++ /dev/null @@ -1,16 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Delete sentinel must be the value of a field. Deletes are implemented by -# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not -# support array indexing. - -description: "update-paths: Delete cannot be in an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, 2, \"Delete\"]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-del.textproto b/tests/unit/v1beta1/testdata/update-paths-del.textproto deleted file mode 100644 index 5197a7848..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-del.textproto +++ /dev/null @@ -1,39 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If a field's value is the Delete sentinel, then it doesn't appear in the update -# data, but does in the mask. - -description: "update-paths: Delete" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "1" - json_values: "\"Delete\"" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-exists-precond.textproto b/tests/unit/v1beta1/testdata/update-paths-exists-precond.textproto deleted file mode 100644 index 084e07726..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-exists-precond.textproto +++ /dev/null @@ -1,17 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Update method does not support an explicit exists precondition. - -description: "update-paths: Exists precondition is invalid" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - precondition: < - exists: true - > - field_paths: < - field: "a" - > - json_values: "1" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-del.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-del.textproto deleted file mode 100644 index 5c92aeb8c..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-fp-del.textproto +++ /dev/null @@ -1,47 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If one nested field is deleted, and another isn't, preserve the second. - -description: "update-paths: field paths with delete" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "foo" - field: "bar" - > - field_paths: < - field: "foo" - field: "delete" - > - json_values: "1" - json_values: "\"Delete\"" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "foo" - value: < - map_value: < - fields: < - key: "bar" - value: < - integer_value: 1 - > - > - > - > - > - > - update_mask: < - field_paths: "foo.bar" - field_paths: "foo.delete" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-dup-transforms.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-dup-transforms.textproto deleted file mode 100644 index a84725a8d..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-fp-dup-transforms.textproto +++ /dev/null @@ -1,23 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The same field cannot occur more than once, even if all the operations are -# transforms. - -description: "update-paths: duplicate field path with only transforms" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - field_paths: < - field: "a" - > - json_values: "[\"ArrayUnion\", 1, 2, 3]" - json_values: "\"ServerTimestamp\"" - json_values: "[\"ArrayUnion\", 4, 5, 6]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-dup.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-dup.textproto deleted file mode 100644 index fedbd3aab..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-fp-dup.textproto +++ /dev/null @@ -1,22 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The same field cannot occur more than once. - -description: "update-paths: duplicate field path" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - field_paths: < - field: "a" - > - json_values: "1" - json_values: "2" - json_values: "3" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-empty-component.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-empty-component.textproto deleted file mode 100644 index 7a5df25b7..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-fp-empty-component.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Empty fields are not allowed. 
- -description: "update-paths: empty field path component" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "*" - field: "" - > - json_values: "1" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-empty.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-empty.textproto deleted file mode 100644 index 311e30932..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-fp-empty.textproto +++ /dev/null @@ -1,13 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A FieldPath of length zero is invalid. - -description: "update-paths: empty field path" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - > - json_values: "1" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-multi.textproto deleted file mode 100644 index 9ba41e398..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-fp-multi.textproto +++ /dev/null @@ -1,42 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The UpdatePaths or equivalent method takes a list of FieldPaths. Each FieldPath -# is a sequence of uninterpreted path components. - -description: "update-paths: multiple-element field path" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - field: "b" - > - json_values: "1" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - map_value: < - fields: < - key: "b" - value: < - integer_value: 1 - > - > - > - > - > - > - update_mask: < - field_paths: "a.b" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-fp-nosplit.textproto b/tests/unit/v1beta1/testdata/update-paths-fp-nosplit.textproto deleted file mode 100644 index 516495266..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-fp-nosplit.textproto +++ /dev/null @@ -1,48 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# FieldPath components are not split on dots. 
- -description: "update-paths: FieldPath elements are not split on dots" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a.b" - field: "f.g" - > - json_values: "{\"n.o\": 7}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a.b" - value: < - map_value: < - fields: < - key: "f.g" - value: < - map_value: < - fields: < - key: "n.o" - value: < - integer_value: 7 - > - > - > - > - > - > - > - > - > - update_mask: < - field_paths: "`a.b`.`f.g`" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-no-paths.textproto b/tests/unit/v1beta1/testdata/update-paths-no-paths.textproto deleted file mode 100644 index d9939dc94..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-no-paths.textproto +++ /dev/null @@ -1,10 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# It is a client-side error to call Update with empty data. - -description: "update-paths: no paths" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-prefix-1.textproto b/tests/unit/v1beta1/testdata/update-paths-prefix-1.textproto deleted file mode 100644 index 1710b9109..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-prefix-1.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# In the input data, one field cannot be a prefix of another. - -description: "update-paths: prefix #1" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - field: "b" - > - field_paths: < - field: "a" - > - json_values: "1" - json_values: "2" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-prefix-2.textproto b/tests/unit/v1beta1/testdata/update-paths-prefix-2.textproto deleted file mode 100644 index be78ab58a..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-prefix-2.textproto +++ /dev/null @@ -1,19 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# In the input data, one field cannot be a prefix of another. - -description: "update-paths: prefix #2" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "a" - field: "b" - > - json_values: "1" - json_values: "2" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-prefix-3.textproto b/tests/unit/v1beta1/testdata/update-paths-prefix-3.textproto deleted file mode 100644 index b8a84c9d1..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-prefix-3.textproto +++ /dev/null @@ -1,20 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# In the input data, one field cannot be a prefix of another, even if the values -# could in principle be combined. 
- -description: "update-paths: prefix #3" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "a" - field: "d" - > - json_values: "{\"b\": 1}" - json_values: "2" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-special-chars.textproto b/tests/unit/v1beta1/testdata/update-paths-special-chars.textproto deleted file mode 100644 index 51cb33b31..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-special-chars.textproto +++ /dev/null @@ -1,53 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# FieldPaths can contain special characters. - -description: "update-paths: special characters" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "*" - field: "~" - > - field_paths: < - field: "*" - field: "`" - > - json_values: "1" - json_values: "2" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "*" - value: < - map_value: < - fields: < - key: "`" - value: < - integer_value: 2 - > - > - fields: < - key: "~" - value: < - integer_value: 1 - > - > - > - > - > - > - update_mask: < - field_paths: "`*`.`\\``" - field_paths: "`*`.`~`" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-st-alone.textproto b/tests/unit/v1beta1/testdata/update-paths-st-alone.textproto deleted file mode 100644 index abc44f55b..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-st-alone.textproto +++ /dev/null @@ -1,29 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ServerTimestamps, then no update operation -# should be produced. - -description: "update-paths: ServerTimestamp alone" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "\"ServerTimestamp\"" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - set_to_server_value: REQUEST_TIME - > - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-st-multi.textproto b/tests/unit/v1beta1/testdata/update-paths-st-multi.textproto deleted file mode 100644 index b0b7df17d..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-st-multi.textproto +++ /dev/null @@ -1,56 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ServerTimestamp field. Since all the -# ServerTimestamp fields are removed, the only field in the update is "a". 
- -description: "update-paths: multiple ServerTimestamp fields" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - field_paths: < - field: "c" - > - json_values: "1" - json_values: "\"ServerTimestamp\"" - json_values: "{\"d\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "c" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c.d" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-st-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-st-nested.textproto deleted file mode 100644 index 307736831..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-st-nested.textproto +++ /dev/null @@ -1,49 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A ServerTimestamp value can occur at any depth. In this case, the transform -# applies to the field path "b.c". Since "c" is removed from the update, "b" -# becomes empty, so it is also removed from the update. - -description: "update-paths: nested ServerTimestamp field" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "1" - json_values: "{\"c\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-paths-st-noarray-nested.textproto deleted file mode 100644 index 2c2cb89b6..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-st-noarray-nested.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. 
- -description: "update-paths: ServerTimestamp cannot be anywhere inside an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, {\"b\": \"ServerTimestamp\"}]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-st-noarray.textproto b/tests/unit/v1beta1/testdata/update-paths-st-noarray.textproto deleted file mode 100644 index a2baa66f5..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-st-noarray.textproto +++ /dev/null @@ -1,15 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. Firestore transforms -# don't support array indexing. - -description: "update-paths: ServerTimestamp cannot be in an array value" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "[1, 2, \"ServerTimestamp\"]" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-paths-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/update-paths-st-with-empty-map.textproto deleted file mode 100644 index a54a24156..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-st-with-empty-map.textproto +++ /dev/null @@ -1,51 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp -# should be stripped out but the empty map should remain. - -description: "update-paths: ServerTimestamp beside an empty map" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - json_values: "{\"b\": {}, \"c\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - map_value: < - fields: < - key: "b" - value: < - map_value: < - > - > - > - > - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-st.textproto b/tests/unit/v1beta1/testdata/update-paths-st.textproto deleted file mode 100644 index 40634c165..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-st.textproto +++ /dev/null @@ -1,49 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with the special ServerTimestamp sentinel is removed from the data in the -# update operation. Instead it appears in a separate Transform operation. Note -# that in these tests, the string "ServerTimestamp" should be replaced with the -# special ServerTimestamp value. 
- -description: "update-paths: ServerTimestamp with data" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - field_paths: < - field: "a" - > - field_paths: < - field: "b" - > - json_values: "1" - json_values: "\"ServerTimestamp\"" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-paths-uptime.textproto b/tests/unit/v1beta1/testdata/update-paths-uptime.textproto deleted file mode 100644 index 7a15874be..000000000 --- a/tests/unit/v1beta1/testdata/update-paths-uptime.textproto +++ /dev/null @@ -1,40 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Update call supports a last-update-time precondition. - -description: "update-paths: last-update-time precondition" -update_paths: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - precondition: < - update_time: < - seconds: 42 - > - > - field_paths: < - field: "a" - > - json_values: "1" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - update_time: < - seconds: 42 - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-prefix-1.textproto b/tests/unit/v1beta1/testdata/update-prefix-1.textproto deleted file mode 100644 index e5c895e73..000000000 --- a/tests/unit/v1beta1/testdata/update-prefix-1.textproto +++ /dev/null @@ -1,11 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# In the input data, one field cannot be a prefix of another. - -description: "update: prefix #1" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a.b\": 1, \"a\": 2}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-prefix-2.textproto b/tests/unit/v1beta1/testdata/update-prefix-2.textproto deleted file mode 100644 index 487017618..000000000 --- a/tests/unit/v1beta1/testdata/update-prefix-2.textproto +++ /dev/null @@ -1,11 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# In the input data, one field cannot be a prefix of another. - -description: "update: prefix #2" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"a.b\": 2}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-prefix-3.textproto b/tests/unit/v1beta1/testdata/update-prefix-3.textproto deleted file mode 100644 index 0c03b0d6b..000000000 --- a/tests/unit/v1beta1/testdata/update-prefix-3.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# In the input data, one field cannot be a prefix of another, even if the values -# could in principle be combined. - -description: "update: prefix #3" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": {\"b\": 1}, \"a.d\": 2}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-quoting.textproto b/tests/unit/v1beta1/testdata/update-quoting.textproto deleted file mode 100644 index 20e530a76..000000000 --- a/tests/unit/v1beta1/testdata/update-quoting.textproto +++ /dev/null @@ -1,45 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# In a field path, any component beginning with a non-letter or underscore is -# quoted. - -description: "update: non-letter starting chars are quoted, except underscore" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"_0.1.+2\": 1}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "_0" - value: < - map_value: < - fields: < - key: "1" - value: < - map_value: < - fields: < - key: "+2" - value: < - integer_value: 1 - > - > - > - > - > - > - > - > - > - update_mask: < - field_paths: "_0.`1`.`+2`" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-split-top-level.textproto b/tests/unit/v1beta1/testdata/update-split-top-level.textproto deleted file mode 100644 index d1b0ca0da..000000000 --- a/tests/unit/v1beta1/testdata/update-split-top-level.textproto +++ /dev/null @@ -1,45 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Update method splits only top-level keys at dots. Keys at other levels are -# taken literally. - -description: "update: Split on dots for top-level keys only" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"h.g\": {\"j.k\": 6}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "h" - value: < - map_value: < - fields: < - key: "g" - value: < - map_value: < - fields: < - key: "j.k" - value: < - integer_value: 6 - > - > - > - > - > - > - > - > - > - update_mask: < - field_paths: "h.g" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-split.textproto b/tests/unit/v1beta1/testdata/update-split.textproto deleted file mode 100644 index b96fd6a4f..000000000 --- a/tests/unit/v1beta1/testdata/update-split.textproto +++ /dev/null @@ -1,44 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Update method splits top-level keys at dots. 
- -description: "update: split on dots" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a.b.c\": 1}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - map_value: < - fields: < - key: "b" - value: < - map_value: < - fields: < - key: "c" - value: < - integer_value: 1 - > - > - > - > - > - > - > - > - > - update_mask: < - field_paths: "a.b.c" - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-st-alone.textproto b/tests/unit/v1beta1/testdata/update-st-alone.textproto deleted file mode 100644 index 0d5ab6e9f..000000000 --- a/tests/unit/v1beta1/testdata/update-st-alone.textproto +++ /dev/null @@ -1,26 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# If the only values in the input are ServerTimestamps, then no update operation -# should be produced. - -description: "update: ServerTimestamp alone" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a" - set_to_server_value: REQUEST_TIME - > - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-st-dot.textproto b/tests/unit/v1beta1/testdata/update-st-dot.textproto deleted file mode 100644 index 19d4d1843..000000000 --- a/tests/unit/v1beta1/testdata/update-st-dot.textproto +++ /dev/null @@ -1,27 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# Like other uses of ServerTimestamp, the data is pruned and the field does not -# appear in the update mask, because it is in the transform. In this case An -# update operation is produced just to hold the precondition. - -description: "update: ServerTimestamp with dotted field" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a.b.c\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a.b.c" - set_to_server_value: REQUEST_TIME - > - > - current_document: < - exists: true - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-st-multi.textproto b/tests/unit/v1beta1/testdata/update-st-multi.textproto deleted file mode 100644 index 0434cb59a..000000000 --- a/tests/unit/v1beta1/testdata/update-st-multi.textproto +++ /dev/null @@ -1,49 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A document can have more than one ServerTimestamp field. Since all the -# ServerTimestamp fields are removed, the only field in the update is "a". - -# b is not in the mask because it will be set in the transform. c must be in the -# mask: it should be replaced entirely. 
The transform will set c.d to the -# timestamp, but the update will delete the rest of c. - -description: "update: multiple ServerTimestamp fields" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "c" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - field_transforms: < - field_path: "c.d" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-st-nested.textproto b/tests/unit/v1beta1/testdata/update-st-nested.textproto deleted file mode 100644 index f79d9c6a0..000000000 --- a/tests/unit/v1beta1/testdata/update-st-nested.textproto +++ /dev/null @@ -1,42 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A ServerTimestamp value can occur at any depth. In this case, the transform -# applies to the field path "b.c". Since "c" is removed from the update, "b" -# becomes empty, so it is also removed from the update. - -description: "update: nested ServerTimestamp field" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - field_paths: "b" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-st-noarray-nested.textproto b/tests/unit/v1beta1/testdata/update-st-noarray-nested.textproto deleted file mode 100644 index 2939dd646..000000000 --- a/tests/unit/v1beta1/testdata/update-st-noarray-nested.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# There cannot be an array value anywhere on the path from the document root to -# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. - -description: "update: ServerTimestamp cannot be anywhere inside an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-st-noarray.textproto b/tests/unit/v1beta1/testdata/update-st-noarray.textproto deleted file mode 100644 index f3879cdf2..000000000 --- a/tests/unit/v1beta1/testdata/update-st-noarray.textproto +++ /dev/null @@ -1,12 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The ServerTimestamp sentinel must be the value of a field. Firestore transforms -# don't support array indexing. - -description: "update: ServerTimestamp cannot be in an array value" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" - is_error: true -> diff --git a/tests/unit/v1beta1/testdata/update-st-with-empty-map.textproto b/tests/unit/v1beta1/testdata/update-st-with-empty-map.textproto deleted file mode 100644 index 1901de2a1..000000000 --- a/tests/unit/v1beta1/testdata/update-st-with-empty-map.textproto +++ /dev/null @@ -1,48 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# When a ServerTimestamp and a map both reside inside a map, the ServerTimestamp -# should be stripped out but the empty map should remain. - -description: "update: ServerTimestamp beside an empty map" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": {\"b\": {}, \"c\": \"ServerTimestamp\"}}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - map_value: < - fields: < - key: "b" - value: < - map_value: < - > - > - > - > - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "a.c" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-st.textproto b/tests/unit/v1beta1/testdata/update-st.textproto deleted file mode 100644 index 12045a922..000000000 --- a/tests/unit/v1beta1/testdata/update-st.textproto +++ /dev/null @@ -1,42 +0,0 @@ -# DO NOT MODIFY. This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# A key with the special ServerTimestamp sentinel is removed from the data in the -# update operation. Instead it appears in a separate Transform operation. Note -# that in these tests, the string "ServerTimestamp" should be replaced with the -# special ServerTimestamp value. - -description: "update: ServerTimestamp with data" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - exists: true - > - > - writes: < - transform: < - document: "projects/projectID/databases/(default)/documents/C/d" - field_transforms: < - field_path: "b" - set_to_server_value: REQUEST_TIME - > - > - > - > -> diff --git a/tests/unit/v1beta1/testdata/update-uptime.textproto b/tests/unit/v1beta1/testdata/update-uptime.textproto deleted file mode 100644 index 66119ac61..000000000 --- a/tests/unit/v1beta1/testdata/update-uptime.textproto +++ /dev/null @@ -1,37 +0,0 @@ -# DO NOT MODIFY. 
This file was generated by -# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. - -# The Update call supports a last-update-time precondition. - -description: "update: last-update-time precondition" -update: < - doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" - precondition: < - update_time: < - seconds: 42 - > - > - json_data: "{\"a\": 1}" - request: < - database: "projects/projectID/databases/(default)" - writes: < - update: < - name: "projects/projectID/databases/(default)/documents/C/d" - fields: < - key: "a" - value: < - integer_value: 1 - > - > - > - update_mask: < - field_paths: "a" - > - current_document: < - update_time: < - seconds: 42 - > - > - > - > ->
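
The update-* fixtures removed above encode the client's Update semantics: top-level keys split on dots, ServerTimestamp sentinels stripped from the update data and emitted as a separate transform write, and last-update-time preconditions. A minimal sketch of the same behavior against the remaining v1 surface (assuming a default google.cloud.firestore Client and the fixtures' "C/d" document path; illustrative only, not generated test data):

    from google.cloud import firestore

    client = firestore.Client()
    doc = client.collection("C").document("d")

    # Top-level keys are split on dots, so this updates the nested field a.b.c
    # and the update mask carries the path "a.b.c".
    doc.update({"a.b.c": 1})

    # The SERVER_TIMESTAMP sentinel is removed from the update data and sent as
    # a separate transform write targeting field path "b".
    doc.update({"a": 1, "b": firestore.SERVER_TIMESTAMP})

    # A last-update-time precondition guards the write, mirroring the
    # update-uptime fixture: use the update_time of a previously read snapshot.
    snapshot = doc.get()
    doc.update(
        {"a": 1},
        option=client.write_option(last_update_time=snapshot.update_time),
    )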