From 476ca563e2db66027a68dac9ce847a22bb8edde0 Mon Sep 17 00:00:00 2001 From: sfarr15 <34426623+sfarr15@users.noreply.github.com> Date: Thu, 26 Mar 2020 11:59:04 +0100 Subject: [PATCH 01/30] Unspecified PDF request option This is the only pagetype not available via tableau api for PDF. The 'Unspecified' pagetype is available for download using tableau online and desktop so I really don't see an issue adding this. Can this kindly be amended as the workaround for this requires downloading an image and converting to pdf which results in poorer quality and larger data size. --- tableauserverclient/server/request_options.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tableauserverclient/server/request_options.py b/tableauserverclient/server/request_options.py index 7e1e6a808..9beea704d 100644 --- a/tableauserverclient/server/request_options.py +++ b/tableauserverclient/server/request_options.py @@ -141,6 +141,7 @@ class PageType: Note = "note" Quarto = "quarto" Tabloid = "tabloid" + Unspecified = "unspecified" class Orientation: Portrait = "portrait" From a0b97042a644b84fc52536a4fcf8f134b3aa6f19 Mon Sep 17 00:00:00 2001 From: Chris Shin Date: Fri, 26 Jun 2020 10:22:17 -0700 Subject: [PATCH 02/30] Adds description to datasource item --- tableauserverclient/models/datasource_item.py | 20 +++++++++++++------ test/assets/datasource_get.xml | 4 ++-- test/assets/datasource_get_by_id.xml | 2 +- test/test_datasource.py | 3 +++ 4 files changed, 20 insertions(+), 9 deletions(-) diff --git a/tableauserverclient/models/datasource_item.py b/tableauserverclient/models/datasource_item.py index e76a42aae..5e63f4e93 100644 --- a/tableauserverclient/models/datasource_item.py +++ b/tableauserverclient/models/datasource_item.py @@ -12,6 +12,7 @@ def __init__(self, project_id, name=None): self._content_url = None self._created_at = None self._datasource_type = None + self._description = None self._id = None self._initial_tags = set() self._project_name = None @@ -86,6 +87,10 @@ def project_name(self): def datasource_type(self): return self._datasource_type + @property + def description(self): + return self._description + @property def updated_at(self): return self._updated_at @@ -100,13 +105,13 @@ def _parse_common_elements(self, datasource_xml, ns): if not isinstance(datasource_xml, ET.Element): datasource_xml = ET.fromstring(datasource_xml).find('.//t:datasource', namespaces=ns) if datasource_xml is not None: - (_, _, _, _, _, updated_at, _, project_id, project_name, owner_id, + (_, _, _, _, _, _, updated_at, _, project_id, project_name, owner_id, certified, certification_note) = self._parse_element(datasource_xml, ns) - self._set_values(None, None, None, None, None, updated_at, None, project_id, + self._set_values(None, None, None, None, None, None, updated_at, None, project_id, project_name, owner_id, certified, certification_note) return self - def _set_values(self, id, name, datasource_type, content_url, created_at, + def _set_values(self, id, name, datasource_type, description, content_url, created_at, updated_at, tags, project_id, project_name, owner_id, certified, certification_note): if id is not None: self._id = id @@ -114,6 +119,8 @@ def _set_values(self, id, name, datasource_type, content_url, created_at, self.name = name if datasource_type: self._datasource_type = datasource_type + if description: + self._description = description if content_url: self._content_url = content_url if created_at: @@ -140,11 +147,11 @@ def from_response(cls, resp, ns): all_datasource_xml = 
parsed_response.findall('.//t:datasource', namespaces=ns) for datasource_xml in all_datasource_xml: - (id_, name, datasource_type, content_url, created_at, updated_at, + (id_, name, datasource_type, description, content_url, created_at, updated_at, tags, project_id, project_name, owner_id, certified, certification_note) = cls._parse_element(datasource_xml, ns) datasource_item = cls(project_id) - datasource_item._set_values(id_, name, datasource_type, content_url, created_at, updated_at, + datasource_item._set_values(id_, name, datasource_type, description, content_url, created_at, updated_at, tags, None, project_name, owner_id, certified, certification_note) all_datasource_items.append(datasource_item) return all_datasource_items @@ -154,6 +161,7 @@ def _parse_element(datasource_xml, ns): id_ = datasource_xml.get('id', None) name = datasource_xml.get('name', None) datasource_type = datasource_xml.get('type', None) + description = datasource_xml.get('description', None) content_url = datasource_xml.get('contentUrl', None) created_at = parse_datetime(datasource_xml.get('createdAt', None)) updated_at = parse_datetime(datasource_xml.get('updatedAt', None)) @@ -177,5 +185,5 @@ def _parse_element(datasource_xml, ns): if owner_elem is not None: owner_id = owner_elem.get('id', None) - return (id_, name, datasource_type, content_url, created_at, updated_at, tags, project_id, + return (id_, name, datasource_type, description, content_url, created_at, updated_at, tags, project_id, project_name, owner_id, certified, certification_note) diff --git a/test/assets/datasource_get.xml b/test/assets/datasource_get.xml index c3ccfa0da..bb371462a 100644 --- a/test/assets/datasource_get.xml +++ b/test/assets/datasource_get.xml @@ -2,12 +2,12 @@ - + - + diff --git a/test/assets/datasource_get_by_id.xml b/test/assets/datasource_get_by_id.xml index 177899b15..4d7b3ecb8 100644 --- a/test/assets/datasource_get_by_id.xml +++ b/test/assets/datasource_get_by_id.xml @@ -1,6 +1,6 @@ - + diff --git a/test/test_datasource.py b/test/test_datasource.py index 2b7cc623c..fc7169f7a 100644 --- a/test/test_datasource.py +++ b/test/test_datasource.py @@ -40,6 +40,7 @@ def test_get(self): self.assertEqual(2, pagination_item.total_available) self.assertEqual('e76a1461-3b1d-4588-bf1b-17551a879ad9', all_datasources[0].id) self.assertEqual('dataengine', all_datasources[0].datasource_type) + self.assertEqual('SampleDsDescription', all_datasources[0].description) self.assertEqual('SampleDS', all_datasources[0].content_url) self.assertEqual('2016-08-11T21:22:40Z', format_datetime(all_datasources[0].created_at)) self.assertEqual('2016-08-11T21:34:17Z', format_datetime(all_datasources[0].updated_at)) @@ -50,6 +51,7 @@ def test_get(self): self.assertEqual('9dbd2263-16b5-46e1-9c43-a76bb8ab65fb', all_datasources[1].id) self.assertEqual('dataengine', all_datasources[1].datasource_type) + self.assertEqual('description Sample', all_datasources[1].description) self.assertEqual('Sampledatasource', all_datasources[1].content_url) self.assertEqual('2016-08-04T21:31:55Z', format_datetime(all_datasources[1].created_at)) self.assertEqual('2016-08-04T21:31:55Z', format_datetime(all_datasources[1].updated_at)) @@ -80,6 +82,7 @@ def test_get_by_id(self): self.assertEqual('9dbd2263-16b5-46e1-9c43-a76bb8ab65fb', single_datasource.id) self.assertEqual('dataengine', single_datasource.datasource_type) + self.assertEqual('abc description xyz', single_datasource.description) self.assertEqual('Sampledatasource', single_datasource.content_url) 
self.assertEqual('2016-08-04T21:31:55Z', format_datetime(single_datasource.created_at)) self.assertEqual('2016-08-04T21:31:55Z', format_datetime(single_datasource.updated_at)) From 9541ccdd5c50d4ca5c118fc4b7edf279138e3d7f Mon Sep 17 00:00:00 2001 From: Udit Chaudhary Date: Sun, 23 May 2021 19:50:33 +0530 Subject: [PATCH 03/30] feat: accept parameters for metadata api This will allow request parameters like timeout to be set by user. Because the metadata api use post_request endpoint, that endpoint will also support adding request parameters. This feature was already present in the GET endpoint --- tableauserverclient/server/endpoint/endpoint.py | 4 ++-- tableauserverclient/server/endpoint/metadata_endpoint.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tableauserverclient/server/endpoint/endpoint.py b/tableauserverclient/server/endpoint/endpoint.py index dc504242a..c3a0914a7 100644 --- a/tableauserverclient/server/endpoint/endpoint.py +++ b/tableauserverclient/server/endpoint/endpoint.py @@ -106,11 +106,11 @@ def put_request(self, url, xml_request=None, content_type='text/xml'): auth_token=self.parent_srv.auth_token, content_type=content_type) - def post_request(self, url, xml_request, content_type='text/xml'): + def post_request(self, url, xml_request, content_type='text/xml', parameters=None): return self._make_request(self.parent_srv.session.post, url, content=xml_request, auth_token=self.parent_srv.auth_token, - content_type=content_type) + content_type=content_type, parameters=parameters) def api(version): diff --git a/tableauserverclient/server/endpoint/metadata_endpoint.py b/tableauserverclient/server/endpoint/metadata_endpoint.py index ac111d6ef..df29eaeb6 100644 --- a/tableauserverclient/server/endpoint/metadata_endpoint.py +++ b/tableauserverclient/server/endpoint/metadata_endpoint.py @@ -55,7 +55,7 @@ def control_baseurl(self): return "{0}/api/metadata/v1/control".format(self.parent_srv.server_address) @api("3.5") - def query(self, query, variables=None, abort_on_error=False): + def query(self, query, variables=None, abort_on_error=False, parameters=None): logger.info('Querying Metadata API') url = self.baseurl @@ -65,7 +65,7 @@ def query(self, query, variables=None, abort_on_error=False): raise InvalidGraphQLQuery('Must provide a string') # Setting content type because post_reuqest defaults to text/xml - server_response = self.post_request(url, graphql_query, content_type='text/json') + server_response = self.post_request(url, graphql_query, content_type='text/json', parameters=parameters) results = server_response.json() if abort_on_error and results.get('errors', None): From 1a25f5cfde99c6919d6e8de3d3c6599f1352586f Mon Sep 17 00:00:00 2001 From: Udit Chaudhary Date: Thu, 24 Jun 2021 20:17:16 +0530 Subject: [PATCH 04/30] refactor: corrected line length of files --- tableauserverclient/server/endpoint/endpoint.py | 3 +-- tableauserverclient/server/endpoint/metadata_endpoint.py | 4 ++-- tableauserverclient/server/endpoint/permissions_endpoint.py | 2 +- tableauserverclient/server/endpoint/server_info_endpoint.py | 2 +- tableauserverclient/server/request_options.py | 2 +- 5 files changed, 6 insertions(+), 7 deletions(-) diff --git a/tableauserverclient/server/endpoint/endpoint.py b/tableauserverclient/server/endpoint/endpoint.py index 4c5006b33..f7d88b0e6 100644 --- a/tableauserverclient/server/endpoint/endpoint.py +++ b/tableauserverclient/server/endpoint/endpoint.py @@ -118,7 +118,6 @@ def delete_request(self, url): # We don't return anything for a 
delete self._make_request(self.parent_srv.session.delete, url, auth_token=self.parent_srv.auth_token) - def put_request(self, url, xml_request=None, content_type="text/xml"): return self._make_request( self.parent_srv.session.put, @@ -135,7 +134,7 @@ def post_request(self, url, xml_request, content_type="text/xml", parameters=Non content=xml_request, auth_token=self.parent_srv.auth_token, content_type=content_type, - parameters=parameters + parameters=parameters, ) diff --git a/tableauserverclient/server/endpoint/metadata_endpoint.py b/tableauserverclient/server/endpoint/metadata_endpoint.py index d88f3788b..4e32d26f0 100644 --- a/tableauserverclient/server/endpoint/metadata_endpoint.py +++ b/tableauserverclient/server/endpoint/metadata_endpoint.py @@ -58,7 +58,7 @@ def control_baseurl(self): @api("3.5") def query(self, query, variables=None, abort_on_error=False, parameters=None): - logger.info('Querying Metadata API') + logger.info("Querying Metadata API") url = self.baseurl @@ -68,7 +68,7 @@ def query(self, query, variables=None, abort_on_error=False, parameters=None): raise InvalidGraphQLQuery("Must provide a string") # Setting content type because post_reuqest defaults to text/xml - server_response = self.post_request(url, graphql_query, content_type='text/json', parameters=parameters) + server_response = self.post_request(url, graphql_query, content_type="text/json", parameters=parameters) results = server_response.json() if abort_on_error and results.get("errors", None): diff --git a/tableauserverclient/server/endpoint/permissions_endpoint.py b/tableauserverclient/server/endpoint/permissions_endpoint.py index 7035837f4..5013a0bef 100644 --- a/tableauserverclient/server/endpoint/permissions_endpoint.py +++ b/tableauserverclient/server/endpoint/permissions_endpoint.py @@ -44,7 +44,7 @@ def delete(self, resource, rules): for rule in rules: for capability, mode in rule.capabilities.items(): - " /permissions/groups/group-id/capability-name/capability-mode" + "/permissions/groups/group-id/capability-name/capability-mode" url = "{0}/{1}/permissions/{2}/{3}/{4}/{5}".format( self.owner_baseurl(), resource.id, diff --git a/tableauserverclient/server/endpoint/server_info_endpoint.py b/tableauserverclient/server/endpoint/server_info_endpoint.py index 8776477d3..ca3715fca 100644 --- a/tableauserverclient/server/endpoint/server_info_endpoint.py +++ b/tableauserverclient/server/endpoint/server_info_endpoint.py @@ -17,7 +17,7 @@ def baseurl(self): @api(version="2.4") def get(self): - """ Retrieve the server info for the server. This is an unauthenticated call """ + """Retrieve the server info for the server. 
This is an unauthenticated call""" try: server_response = self.get_unauthenticated_request(self.baseurl) except ServerResponseError as e: diff --git a/tableauserverclient/server/request_options.py b/tableauserverclient/server/request_options.py index 23d10b3d6..4ebf1e4d6 100644 --- a/tableauserverclient/server/request_options.py +++ b/tableauserverclient/server/request_options.py @@ -98,7 +98,7 @@ def get_query_params(self): class _FilterOptionsBase(RequestOptionsBase): - """ Provide a basic implementation of adding view filters to the url """ + """Provide a basic implementation of adding view filters to the url""" def __init__(self): self.view_filters = [] From 13614e7ced59b8da5d0f2a114b9c59d2a9921deb Mon Sep 17 00:00:00 2001 From: Chris Shin Date: Fri, 23 Jul 2021 09:51:00 -0700 Subject: [PATCH 05/30] Update publish.sh to use python3 (#866) --- publish.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/publish.sh b/publish.sh index 99a3115ec..02812c1c3 100755 --- a/publish.sh +++ b/publish.sh @@ -3,7 +3,6 @@ set -e rm -rf dist -python setup.py sdist -python setup.py bdist_wheel +python3 setup.py sdist python3 setup.py bdist_wheel twine upload dist/* From 7586ab1bed6c9bb84a4e3f29ed3b29db6681ba35 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Mon, 23 Aug 2021 16:18:14 -0700 Subject: [PATCH 06/30] Add handling for workbooks in personal spaces which will not have project ID or Name --- tableauserverclient/models/workbook_item.py | 5 ++++ test/assets/workbook_get_by_id_personal.xml | 13 +++++++++++ test/test_workbook.py | 26 +++++++++++++++++++++ 3 files changed, 44 insertions(+) create mode 100644 test/assets/workbook_get_by_id_personal.xml diff --git a/tableauserverclient/models/workbook_item.py b/tableauserverclient/models/workbook_item.py index 14ca8f33b..9c7e2022e 100644 --- a/tableauserverclient/models/workbook_item.py +++ b/tableauserverclient/models/workbook_item.py @@ -10,6 +10,7 @@ from .permissions_item import PermissionsRule from ..datetime_helpers import parse_datetime import copy +import uuid class WorkbookItem(object): @@ -275,6 +276,10 @@ def from_response(cls, resp, ns): data_acceleration_config, ) = cls._parse_element(workbook_xml, ns) + # workaround for Personal Space workbooks which won't have a project + if not project_id: + project_id = uuid.uuid4() + workbook_item = cls(project_id) workbook_item._set_values( id, diff --git a/test/assets/workbook_get_by_id_personal.xml b/test/assets/workbook_get_by_id_personal.xml new file mode 100644 index 000000000..90cc65e73 --- /dev/null +++ b/test/assets/workbook_get_by_id_personal.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/test/test_workbook.py b/test/test_workbook.py index fc1344b9e..1a6714d19 100644 --- a/test/test_workbook.py +++ b/test/test_workbook.py @@ -20,6 +20,7 @@ ADD_TAGS_XML = os.path.join(TEST_ASSET_DIR, 'workbook_add_tags.xml') GET_BY_ID_XML = os.path.join(TEST_ASSET_DIR, 'workbook_get_by_id.xml') +GET_BY_ID_XML_PERSONAL = os.path.join(TEST_ASSET_DIR, 'workbook_get_by_id_personal.xml') GET_EMPTY_XML = os.path.join(TEST_ASSET_DIR, 'workbook_get_empty.xml') GET_INVALID_DATE_XML = os.path.join(TEST_ASSET_DIR, 'workbook_get_invalid_date.xml') GET_XML = os.path.join(TEST_ASSET_DIR, 'workbook_get.xml') @@ -128,6 +129,31 @@ def test_get_by_id(self): self.assertEqual('ENDANGERED SAFARI', single_workbook.views[0].name) self.assertEqual('SafariSample/sheets/ENDANGEREDSAFARI', single_workbook.views[0].content_url) + def test_get_by_id_personal(self): + # workbooks in personal space don't 
have project_id or project_name + with open(GET_BY_ID_XML_PERSONAL, 'rb') as f: + response_xml = f.read().decode('utf-8') + with requests_mock.mock() as m: + m.get(self.baseurl + '/3cc6cd06-89ce-4fdc-b935-5294135d6d43', text=response_xml) + single_workbook = self.server.workbooks.get_by_id('3cc6cd06-89ce-4fdc-b935-5294135d6d43') + + self.assertEqual('3cc6cd06-89ce-4fdc-b935-5294135d6d43', single_workbook.id) + self.assertEqual('SafariSample', single_workbook.name) + self.assertEqual('SafariSample', single_workbook.content_url) + self.assertEqual('http://tableauserver/#/workbooks/2/views', single_workbook.webpage_url) + self.assertEqual(False, single_workbook.show_tabs) + self.assertEqual(26, single_workbook.size) + self.assertEqual('2016-07-26T20:34:56Z', format_datetime(single_workbook.created_at)) + self.assertEqual('description for SafariSample', single_workbook.description) + self.assertEqual('2016-07-26T20:35:05Z', format_datetime(single_workbook.updated_at)) + #self.assertIsNone(single_workbook.project_id) + #self.assertIsNone(single_workbook.project_name) + self.assertEqual('5de011f8-5aa9-4d5b-b991-f462c8dd6bb7', single_workbook.owner_id) + self.assertEqual(set(['Safari', 'Sample']), single_workbook.tags) + self.assertEqual('d79634e1-6063-4ec9-95ff-50acbf609ff5', single_workbook.views[0].id) + self.assertEqual('ENDANGERED SAFARI', single_workbook.views[0].name) + self.assertEqual('SafariSample/sheets/ENDANGEREDSAFARI', single_workbook.views[0].content_url) + def test_get_by_id_missing_id(self): self.assertRaises(ValueError, self.server.workbooks.get_by_id, '') From 1903d3270f3e99c1c86084fe359ec6d6fa494ef4 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Mon, 23 Aug 2021 16:33:00 -0700 Subject: [PATCH 07/30] Improve tests to show that project_id should be set to something, but project_name is expected to not --- test/test_workbook.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_workbook.py b/test/test_workbook.py index 1a6714d19..d3a3b59b4 100644 --- a/test/test_workbook.py +++ b/test/test_workbook.py @@ -146,8 +146,8 @@ def test_get_by_id_personal(self): self.assertEqual('2016-07-26T20:34:56Z', format_datetime(single_workbook.created_at)) self.assertEqual('description for SafariSample', single_workbook.description) self.assertEqual('2016-07-26T20:35:05Z', format_datetime(single_workbook.updated_at)) - #self.assertIsNone(single_workbook.project_id) - #self.assertIsNone(single_workbook.project_name) + self.assertTrue(single_workbook.project_id) + self.assertIsNone(single_workbook.project_name) self.assertEqual('5de011f8-5aa9-4d5b-b991-f462c8dd6bb7', single_workbook.owner_id) self.assertEqual(set(['Safari', 'Sample']), single_workbook.tags) self.assertEqual('d79634e1-6063-4ec9-95ff-50acbf609ff5', single_workbook.views[0].id) From a910cbbd2274bcc2444205e4223f948ba711d806 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Fri, 5 Mar 2021 14:06:35 -0800 Subject: [PATCH 08/30] Add issue template --- .github/ISSUE_TEMPLATE/bug_report.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..a199226df --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,25 @@ +--- +name: Bug report +about: Create a bug report or request for help +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. 
+ +**Versions** +Details of your environment, including: + - Tableau Server version (or note if using Tableau Online) + - Python version + - TSC library version + +**To Reproduce** +Steps to reproduce the behavior. Please include a code snippet where possible. + +**Results** +What are the results or error messages received? + +**NOTE:** Be careful not to post user names, passwords, auth tokens or any other private or sensitive information. From 09917f10c096eb5acaf7bfeaf5de5c5eca130c91 Mon Sep 17 00:00:00 2001 From: Jac Date: Mon, 24 May 2021 13:15:49 -0700 Subject: [PATCH 09/30] Create slack.yml Created a new action from https://github.com/marketplace/actions/send-message-to-slack --- .github/workflows/slack.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 .github/workflows/slack.yml diff --git a/.github/workflows/slack.yml b/.github/workflows/slack.yml new file mode 100644 index 000000000..7d9052bfd --- /dev/null +++ b/.github/workflows/slack.yml @@ -0,0 +1,18 @@ +name: 💬 Send Message to Slack 🚀 + +on: [push, pull_request, issues] + +jobs: + slack-notifications: + runs-on: ubuntu-20.04 + name: Sends a message to Slack when a push, a pull request or an issue is made + steps: + - name: Send message to Slack API + uses: archive/github-actions-slack@v2.0.1 + id: notify + with: + slack-bot-user-oauth-access-token: ${{ secrets.SLACK_BOT_USER_OAUTH_ACCESS_TOKEN }} + slack-channel: C019HCX84L9 + slack-text: Hello! Event "${{ github.event_name }}" in "${{ github.repository }}" 🤓 + - name: Result from "Send Message" + run: echo "The result was ${{ steps.notify.outputs.slack-result }}" From 00a41fb38fb6cd6e8d57edac4103d82017c201f5 Mon Sep 17 00:00:00 2001 From: Jac Date: Mon, 24 May 2021 13:34:07 -0700 Subject: [PATCH 10/30] whitespace change to re-try PR --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1aed88d61..a5445e052 100644 --- a/README.md +++ b/README.md @@ -12,3 +12,4 @@ This repository contains Python source code and sample files. Python versions 3. 
For more information on installing and using TSC, see the documentation: + From 6ebf334dace5815a957e979d25d7c09a7cb0f597 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Wed, 1 Sep 2021 14:22:53 -0700 Subject: [PATCH 11/30] Upgrade to newer Slack action provider --- .github/workflows/slack.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/slack.yml b/.github/workflows/slack.yml index 7d9052bfd..c3b17e8c4 100644 --- a/.github/workflows/slack.yml +++ b/.github/workflows/slack.yml @@ -8,7 +8,7 @@ jobs: name: Sends a message to Slack when a push, a pull request or an issue is made steps: - name: Send message to Slack API - uses: archive/github-actions-slack@v2.0.1 + uses: archive/github-actions-slack@v2.2.2 id: notify with: slack-bot-user-oauth-access-token: ${{ secrets.SLACK_BOT_USER_OAUTH_ACCESS_TOKEN }} From 1ea39c2e77b8522fb60362ba3ce81eb195995a84 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Wed, 15 Sep 2021 12:45:59 -0700 Subject: [PATCH 12/30] Add Mac and Win to PR testing pipeline Also add support for Pythong 3.10 RC --- .github/workflows/run-tests.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 45b9548c1..9a51ac7a9 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -1,20 +1,21 @@ -name: Python package +name: Python tests on: [push] jobs: build: - - runs-on: ubuntu-latest strategy: fail-fast: false matrix: - python-version: [3.5, 3.6, 3.7, 3.8, 3.9] + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.5, 3.6, 3.7, 3.8, 3.9, 3.10.0-rc.2] + + runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python ${{ matrix.python-version }} on ${{ matrix.os }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} From b67a80426d4a9143981ecd530f6b3e6e3b0313a5 Mon Sep 17 00:00:00 2001 From: Tyler Doyle Date: Wed, 22 Sep 2021 17:18:07 -0700 Subject: [PATCH 13/30] Revert slack.yml to unblock open PRs I don't have the time to figure out a fix for the moment, this might be the fastest way --- .github/workflows/slack.yml | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 .github/workflows/slack.yml diff --git a/.github/workflows/slack.yml b/.github/workflows/slack.yml deleted file mode 100644 index c3b17e8c4..000000000 --- a/.github/workflows/slack.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: 💬 Send Message to Slack 🚀 - -on: [push, pull_request, issues] - -jobs: - slack-notifications: - runs-on: ubuntu-20.04 - name: Sends a message to Slack when a push, a pull request or an issue is made - steps: - - name: Send message to Slack API - uses: archive/github-actions-slack@v2.2.2 - id: notify - with: - slack-bot-user-oauth-access-token: ${{ secrets.SLACK_BOT_USER_OAUTH_ACCESS_TOKEN }} - slack-channel: C019HCX84L9 - slack-text: Hello! Event "${{ github.event_name }}" in "${{ github.repository }}" 🤓 - - name: Result from "Send Message" - run: echo "The result was ${{ steps.notify.outputs.slack-result }}" From cd3668541d8c0f071d08f26f207aea7701e6b4dc Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Thu, 23 Sep 2021 11:14:53 +0200 Subject: [PATCH 14/30] Extend `publish_datasource.py` sample to allow specifying a project name (#888) I don't have access to the `Default` project on my Tableau server. Still I want to be able to run this sample... 
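[Note] For reference, the project-name lookup this patch introduces boils down to the sketch below. It assumes an already signed-in `server` (a TSC.Server) and uses a hypothetical `project_name` variable in place of the argparse-supplied value; the real wiring is in the diff that follows.

    import tableauserverclient as TSC

    project_name = "My Project"  # assumption: name of an existing project on the site

    # Ask the server to filter projects by name instead of paging through all of them
    req_options = TSC.RequestOptions()
    req_options.filter.add(TSC.Filter(TSC.RequestOptions.Field.Name,
                                      TSC.RequestOptions.Operator.Equals,
                                      project_name))
    projects = list(TSC.Pager(server.projects, req_options))
    if len(projects) > 1:
        raise ValueError("The project name is not unique")
    project_id = projects[0].id  # later passed to TSC.DatasourceItem(project_id=project_id)

Filtering server-side via RequestOptions keeps the sample from having to enumerate every project just to match one name, and an empty project_id still falls back to the site's default project as before.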
--- samples/publish_datasource.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/samples/publish_datasource.py b/samples/publish_datasource.py index fa0fe2a95..9c0099ac6 100644 --- a/samples/publish_datasource.py +++ b/samples/publish_datasource.py @@ -35,6 +35,7 @@ def main(): parser.add_argument('--filepath', '-f', required=True, help='filepath to the datasource to publish') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + parser.add_argument('--project', help='Project within which to publish the datasource') parser.add_argument('--async', '-a', help='Publishing asynchronously', dest='async_', action='store_true') parser.add_argument('--conn-username', help='connection username') parser.add_argument('--conn-password', help='connection password') @@ -55,9 +56,22 @@ def main(): tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): - # Create a new datasource item to publish - empty project_id field - # will default the publish to the site's default project - new_datasource = TSC.DatasourceItem(project_id="") + # Empty project_id field will default the publish to the site's default project + project_id = "" + + # Retrieve the project id, if a project name was passed + if args.project is not None: + req_options = TSC.RequestOptions() + req_options.filter.add(TSC.Filter(TSC.RequestOptions.Field.Name, + TSC.RequestOptions.Operator.Equals, + args.project)) + projects = list(TSC.Pager(server.projects, req_options)) + if len(projects) > 1: + raise ValueError("The project name is not unique") + project_id = projects[0].id + + # Create a new datasource item to publish + new_datasource = TSC.DatasourceItem(project_id=project_id) # Create a connection_credentials item if connection details are provided new_conn_creds = None From df481ffc7da27a7d0d2fe06564de8b2447174fe3 Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Sat, 18 Sep 2021 11:21:53 +0200 Subject: [PATCH 15/30] Tests: Verify `datasources.refresh` to return the scheduled job --- test/test_datasource.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/test/test_datasource.py b/test/test_datasource.py index e221f0c88..42d1dfade 100644 --- a/test/test_datasource.py +++ b/test/test_datasource.py @@ -333,7 +333,13 @@ def test_refresh_id(self): with requests_mock.mock() as m: m.post(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/refresh', status_code=202, text=response_xml) - self.server.datasources.refresh('9dbd2263-16b5-46e1-9c43-a76bb8ab65fb') + new_job = self.server.datasources.refresh('9dbd2263-16b5-46e1-9c43-a76bb8ab65fb') + + self.assertEqual('7c3d599e-949f-44c3-94a1-f30ba85757e4', new_job.id) + self.assertEqual('RefreshExtract', new_job.type) + self.assertEqual(None, new_job.progress) + self.assertEqual('2020-03-05T22:05:32Z', format_datetime(new_job.created_at)) + self.assertEqual(-1, new_job.finish_code) def test_refresh_object(self): self.server.version = '2.8' @@ -344,7 +350,10 @@ def test_refresh_object(self): with requests_mock.mock() as m: m.post(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/refresh', status_code=202, text=response_xml) - self.server.datasources.refresh(datasource) + new_job = self.server.datasources.refresh(datasource) + + # We only check the `id`; remaining fields are already 
tested in `test_refresh_id` + self.assertEqual('7c3d599e-949f-44c3-94a1-f30ba85757e4', new_job.id) def test_delete(self): with requests_mock.mock() as m: From b0e9abf5a688c6dca991b2b0bfc19d6dc8711bb9 Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Sat, 18 Sep 2021 16:05:33 +0200 Subject: [PATCH 16/30] Remove dead code from `datasources.publish` --- tableauserverclient/server/endpoint/datasources_endpoint.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py index ccdbfa0d1..7b80c2b2b 100644 --- a/tableauserverclient/server/endpoint/datasources_endpoint.py +++ b/tableauserverclient/server/endpoint/datasources_endpoint.py @@ -282,10 +282,6 @@ def publish( new_datasource = DatasourceItem.from_response(server_response.content, self.parent_srv.namespace)[0] logger.info("Published {0} (ID: {1})".format(filename, new_datasource.id)) return new_datasource - server_response = self.post_request(url, xml_request, content_type) - new_datasource = DatasourceItem.from_response(server_response.content, self.parent_srv.namespace)[0] - logger.info("Published {0} (ID: {1})".format(filename, new_datasource.id)) - return new_datasource @api(version="2.0") def populate_permissions(self, item): From 1c802ee3bcb0505d861b9828472bfac48cc50214 Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Sat, 18 Sep 2021 22:56:28 +0200 Subject: [PATCH 17/30] Use correct JSON Mimetype The official MIME type for JSON is `application/json`, not `text/json`. --- tableauserverclient/server/endpoint/metadata_endpoint.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tableauserverclient/server/endpoint/metadata_endpoint.py b/tableauserverclient/server/endpoint/metadata_endpoint.py index 368a92a97..421c80f95 100644 --- a/tableauserverclient/server/endpoint/metadata_endpoint.py +++ b/tableauserverclient/server/endpoint/metadata_endpoint.py @@ -67,7 +67,7 @@ def query(self, query, variables=None, abort_on_error=False): raise InvalidGraphQLQuery("Must provide a string") # Setting content type because post_reuqest defaults to text/xml - server_response = self.post_request(url, graphql_query, content_type="text/json") + server_response = self.post_request(url, graphql_query, content_type="application/json") results = server_response.json() if abort_on_error and results.get("errors", None): @@ -112,7 +112,7 @@ def paginated_query(self, query, variables=None, abort_on_error=False): paginated_results = results_dict["pages"] # get first page - server_response = self.post_request(url, graphql_query, content_type="text/json") + server_response = self.post_request(url, graphql_query, content_type="application/json") results = server_response.json() if abort_on_error and results.get("errors", None): @@ -129,7 +129,7 @@ def paginated_query(self, query, variables=None, abort_on_error=False): # make the call logger.debug("Calling Token: " + cursor) graphql_query = json.dumps({"query": query, "variables": variables}) - server_response = self.post_request(url, graphql_query, content_type="text/json") + server_response = self.post_request(url, graphql_query, content_type="application/json") results = server_response.json() # verify response if abort_on_error and results.get("errors", None): From 74bec027ca1c3975b73ad88415643c761da3f16b Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Sat, 18 Sep 2021 14:30:19 +0200 Subject: [PATCH 18/30] Unify arguments of sample scripts I am pretty 
new to TSC, and wanted to run some sample scripts to get an understanding of the library. Doing so, I realized that every sample had a slightly different command line, even for common arguments: * Some expected `site`, some `site-id`, some were lacking site-support completely (and thereby unusable for Tableau Online) * Some had a short option `-i`, some had the short option `-S` for the site name * Some expected password-based authentication, some expected personal access tokens This commit fixes all those inconsistencies, so that users don't have to re-learn the command line options for each individual script. --- samples/add_default_permission.py | 24 +++++++++++------------- samples/create_group.py | 17 +++++++++++------ samples/create_project.py | 23 ++++++++++------------- samples/create_schedules.py | 17 +++++++++++------ samples/download_view_image.py | 29 ++++++++++++----------------- samples/explore_datasource.py | 20 +++++++++++--------- samples/explore_webhooks.py | 30 +++++++++++------------------- samples/explore_workbook.py | 28 ++++++++++++++-------------- samples/export.py | 19 ++++++++----------- samples/export_wb.py | 21 +++++++++------------ samples/filter_sort_groups.py | 25 +++++++++++-------------- samples/filter_sort_projects.py | 23 ++++++++++------------- samples/initialize_server.py | 24 +++++++++++++----------- samples/kill_all_jobs.py | 21 +++++++++------------ samples/list.py | 21 +++++++++------------ samples/login.py | 19 ++++++++++--------- samples/move_workbook_projects.py | 20 +++++++++++--------- samples/move_workbook_sites.py | 18 +++++++++++------- samples/pagination_sample.py | 21 +++++++++++---------- samples/publish_datasource.py | 6 ++++-- samples/publish_workbook.py | 23 ++++++++++++----------- samples/query_permissions.py | 22 +++++++++------------- samples/refresh.py | 21 ++++++++------------- samples/refresh_tasks.py | 23 +++++++++-------------- samples/set_http_options.py | 14 +++++++++----- samples/set_refresh_schedule.py | 17 ++++++++--------- samples/update_connection.py | 21 ++++++++------------- 27 files changed, 270 insertions(+), 297 deletions(-) diff --git a/samples/add_default_permission.py b/samples/add_default_permission.py index 63c38f53d..77ad58a11 100644 --- a/samples/add_default_permission.py +++ b/samples/add_default_permission.py @@ -10,7 +10,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -18,27 +17,26 @@ def main(): parser = argparse.ArgumentParser(description='Add workbook default permissions for a given project.') - parser.add_argument('--server', '-s', required=True, help='Server address') - parser.add_argument('--username', '-u', required=True, help='Username to sign into server') - parser.add_argument('--site', '-S', default=None, help='Site to sign into - default site if not provided') - parser.add_argument('-p', default=None, help='Password to sign into server') - + # Common options; please keep those in sync across all samples + parser.add_argument('--server', '-s', required=True, help='server address') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + 
# Options specific to this sample + # This sample has no additional options, yet. If you add some, please add them here args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - # Sign in - tableau_auth = TSC.TableauAuth(args.username, password, args.site) + # Sign in to server + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): diff --git a/samples/create_group.py b/samples/create_group.py index 7f9dc1e96..4459eb96a 100644 --- a/samples/create_group.py +++ b/samples/create_group.py @@ -7,7 +7,6 @@ import argparse -import getpass import logging from datetime import time @@ -18,20 +17,26 @@ def main(): parser = argparse.ArgumentParser(description='Creates a sample user group.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - args = parser.parse_args() + # Options specific to this sample + # This sample has no additional options, yet. 
If you add some, please add them here - password = getpass.getpass("Password: ") + args = parser.parse_args() # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): group = TSC.GroupItem('test') group = server.groups.create(group) diff --git a/samples/create_project.py b/samples/create_project.py index 0380cb8a0..b3b28c2dc 100644 --- a/samples/create_project.py +++ b/samples/create_project.py @@ -8,7 +8,6 @@ #### import argparse -import getpass import logging import sys @@ -27,28 +26,26 @@ def create_project(server, project_item): def main(): parser = argparse.ArgumentParser(description='Create new projects.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--site', '-S', default=None) - parser.add_argument('-p', default=None, help='password') - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + # This sample has no additional options, yet. 
If you add some, please add them here args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Use highest Server REST API version available server.use_server_version() diff --git a/samples/create_schedules.py b/samples/create_schedules.py index c1bcb712f..3c2627bf6 100644 --- a/samples/create_schedules.py +++ b/samples/create_schedules.py @@ -7,7 +7,6 @@ import argparse -import getpass import logging from datetime import time @@ -18,20 +17,26 @@ def main(): parser = argparse.ArgumentParser(description='Creates sample schedules for each type of frequency.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - args = parser.parse_args() + # Options specific to this sample + # This sample has no additional options, yet. 
If you add some, please add them here - password = getpass.getpass("Password: ") + args = parser.parse_args() # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Hourly Schedule # This schedule will run every 2 hours between 2:30AM and 11:00PM diff --git a/samples/download_view_image.py b/samples/download_view_image.py index 07162eebf..17cc2000b 100644 --- a/samples/download_view_image.py +++ b/samples/download_view_image.py @@ -9,7 +9,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -18,34 +17,30 @@ def main(): parser = argparse.ArgumentParser(description='Download image of a specified view.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--site-id', '-si', required=False, - help='content url for site the view is on') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--view-name', '-v', required=True, + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') + parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', + help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--view-name', '-vn', required=True, help='name of view to download an image of') parser.add_argument('--filepath', '-f', required=True, help='filepath to save the image returned') parser.add_argument('--maxage', '-m', required=False, help='max age of the image in the cache in minutes.') - parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', - help='desired logging level (set to error by default)') args = parser.parse_args() - password = getpass.getpass("Password: ") - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # Step 1: Sign in to server. 
- site_id = args.site_id - if not site_id: - site_id = "" - tableau_auth = TSC.TableauAuth(args.username, password, site_id=site_id) - server = TSC.Server(args.server) - # The new endpoint was introduced in Version 2.5 - server.version = "2.5" - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Step 2: Query for the view that we want an image of req_option = TSC.RequestOptions() diff --git a/samples/explore_datasource.py b/samples/explore_datasource.py index e740d60f1..a78345122 100644 --- a/samples/explore_datasource.py +++ b/samples/explore_datasource.py @@ -10,7 +10,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -19,25 +18,28 @@ def main(): parser = argparse.ArgumentParser(description='Explore datasource functions supported by the Server API.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--publish', '-p', metavar='FILEPATH', help='path to datasource to publish') - parser.add_argument('--download', '-d', metavar='FILEPATH', help='path to save downloaded datasource') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--publish', metavar='FILEPATH', help='path to datasource to publish') + parser.add_argument('--download', metavar='FILEPATH', help='path to save downloaded datasource') args = parser.parse_args() - password = getpass.getpass("Password: ") - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # SIGN IN - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - server.use_highest_version() + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Query projects for use when demonstrating publishing and updating all_projects, pagination_item = server.projects.get() diff --git a/samples/explore_webhooks.py b/samples/explore_webhooks.py index ab94f7195..50c677cba 100644 --- a/samples/explore_webhooks.py +++ b/samples/explore_webhooks.py @@ -10,7 +10,6 @@ #### import argparse -import getpass import logging import os.path @@ -20,35 +19,28 @@ def main(): parser = argparse.ArgumentParser(description='Explore webhook functions supported by the Server API.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--site', '-S', default=None) - parser.add_argument('-p', default=None, help='password') - 
parser.add_argument('--create', '-c', help='create a webhook') - parser.add_argument('--delete', '-d', help='delete a webhook', action='store_true') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--create', help='create a webhook') + parser.add_argument('--delete', help='delete a webhook', action='store_true') args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # SIGN IN - tableau_auth = TSC.TableauAuth(args.username, password, args.site) - print("Signing in to " + args.server + " [" + args.site + "] as " + args.username) - server = TSC.Server(args.server) - - # Set http options to disable verifying SSL - server.add_http_options({'verify': False}) - - server.use_server_version() - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Create webhook if create flag is set (-create, -c) diff --git a/samples/explore_workbook.py b/samples/explore_workbook.py index 88eebc1a3..8746db80e 100644 --- a/samples/explore_workbook.py +++ b/samples/explore_workbook.py @@ -10,7 +10,6 @@ #### import argparse -import getpass import logging import os.path @@ -20,33 +19,34 @@ def main(): parser = argparse.ArgumentParser(description='Explore workbook functions supported by the Server API.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--publish', '-p', metavar='FILEPATH', help='path to workbook to publish') - parser.add_argument('--download', '-d', metavar='FILEPATH', help='path to save downloaded workbook') - parser.add_argument('--preview-image', '-i', metavar='FILENAME', - help='filename (a .png file) to save the preview image') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--publish', metavar='FILEPATH', help='path to workbook to publish') + parser.add_argument('--download', metavar='FILEPATH', help='path to save downloaded workbook') + parser.add_argument('--preview-image', '-i', metavar='FILENAME', + help='filename (a .png file) to save the preview image') args = parser.parse_args() - password = getpass.getpass("Password: ") - # Set logging level based on user input, 
or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # SIGN IN - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - server.use_highest_version() - - overwrite_true = TSC.Server.PublishMode.Overwrite - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Publish workbook if publish flag is set (-publish, -p) + overwrite_true = TSC.Server.PublishMode.Overwrite if args.publish: all_projects, pagination_item = server.projects.get() default_project = next((project for project in all_projects if project.is_default()), None) diff --git a/samples/export.py b/samples/export.py index b8cd01140..2b6de57f9 100644 --- a/samples/export.py +++ b/samples/export.py @@ -6,7 +6,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -14,13 +13,16 @@ def main(): parser = argparse.ArgumentParser(description='Export a view as an image, PDF, or CSV') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--site', '-S', default=None) - parser.add_argument('-p', default=None) - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample group = parser.add_mutually_exclusive_group(required=True) group.add_argument('--pdf', dest='type', action='store_const', const=('populate_pdf', 'PDFRequestOptions', 'pdf', 'pdf')) @@ -36,16 +38,11 @@ def main(): args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - tableau_auth = TSC.TableauAuth(args.username, password, args.site) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): views = filter(lambda x: x.id == args.resource_id, diff --git a/samples/export_wb.py b/samples/export_wb.py index 334d57c89..a9b4d60be 100644 --- a/samples/export_wb.py +++ b/samples/export_wb.py @@ -9,7 +9,6 @@ import argparse -import getpass import logging import tempfile import shutil @@ -52,23 +51,21 @@ def cleanup(tempdir): def main(): parser = argparse.ArgumentParser(description='Export to PDF all of the views in a workbook.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--site', '-S', default=None, help='Site to log into, do not specify for default site') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - 
parser.add_argument('--password', '-p', default=None, help='password for the user') - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample parser.add_argument('--file', '-f', default='out.pdf', help='filename to store the exported data') parser.add_argument('resource_id', help='LUID for the workbook') args = parser.parse_args() - if args.password is None: - password = getpass.getpass("Password: ") - else: - password = args.password - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) @@ -76,9 +73,9 @@ def main(): tempdir = tempfile.mkdtemp('tsc') logging.debug("Saving to tempdir: %s", tempdir) - tableau_auth = TSC.TableauAuth(args.username, password, args.site) - server = TSC.Server(args.server, use_server_version=True) try: + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): get_list = functools.partial(get_views_for_workbook, server) download = functools.partial(download_pdf, server, tempdir) diff --git a/samples/filter_sort_groups.py b/samples/filter_sort_groups.py index f8123a29c..7f160f66d 100644 --- a/samples/filter_sort_groups.py +++ b/samples/filter_sort_groups.py @@ -7,7 +7,6 @@ import argparse -import getpass import logging import tableauserverclient as TSC @@ -25,30 +24,28 @@ def create_example_group(group_name='Example Group', server=None): def main(): parser = argparse.ArgumentParser(description='Filter and sort groups.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - parser.add_argument('-p', default=None) - args = parser.parse_args() + # Options specific to this sample + # This sample has no additional options, yet. 
If you add some, please add them here - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p + args = parser.parse_args() # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): - # Determine and use the highest api version for the server - server.use_server_version() - group_name = 'SALES NORTHWEST' # Try to create a group named "SALES NORTHWEST" create_example_group(group_name, server) diff --git a/samples/filter_sort_projects.py b/samples/filter_sort_projects.py index 0c62614b0..e4f695fda 100644 --- a/samples/filter_sort_projects.py +++ b/samples/filter_sort_projects.py @@ -6,7 +6,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -26,28 +25,26 @@ def create_example_project(name='Example Project', content_permissions='LockedTo def main(): parser = argparse.ArgumentParser(description='Filter and sort projects.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--site', '-S', default=None) - parser.add_argument('-p', default=None) - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + # This sample has no additional options, yet. 
If you add some, please add them here args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Use highest Server REST API version available server.use_server_version() diff --git a/samples/initialize_server.py b/samples/initialize_server.py index a3e312ce9..a7dd552e1 100644 --- a/samples/initialize_server.py +++ b/samples/initialize_server.py @@ -5,7 +5,6 @@ #### import argparse -import getpass import glob import logging import tableauserverclient as TSC @@ -13,17 +12,21 @@ def main(): parser = argparse.ArgumentParser(description='Initialize a server with content.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--datasources-folder', '-df', required=True, help='folder containing datasources') - parser.add_argument('--workbooks-folder', '-wf', required=True, help='folder containing workbooks') - parser.add_argument('--site-id', '-sid', required=False, default='', help='site id of the site to use') - parser.add_argument('--project', '-p', required=False, default='Default', help='project to use') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - args = parser.parse_args() + # Options specific to this sample + parser.add_argument('--datasources-folder', '-df', required=True, help='folder containing datasources') + parser.add_argument('--workbooks-folder', '-wf', required=True, help='folder containing workbooks') + parser.add_argument('--project', required=False, default='Default', help='project to use') - password = getpass.getpass("Password: ") + args = parser.parse_args() # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) @@ -32,9 +35,8 @@ def main(): ################################################################################ # Step 1: Sign in to server. 
################################################################################ - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): ################################################################################ diff --git a/samples/kill_all_jobs.py b/samples/kill_all_jobs.py index 1aeb7298e..f9fa173e5 100644 --- a/samples/kill_all_jobs.py +++ b/samples/kill_all_jobs.py @@ -5,7 +5,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -13,27 +12,25 @@ def main(): parser = argparse.ArgumentParser(description='Cancel all of the running background jobs.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--site', '-S', default=None, help='site to log into, do not specify for default site') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--password', '-p', default=None, help='password for the user') - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + # This sample has no additional options, yet. 
If you add some, please add them here args = parser.parse_args() - if args.password is None: - password = getpass.getpass("Password: ") - else: - password = args.password - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - # SIGN IN - tableau_auth = TSC.TableauAuth(args.username, password, args.site) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): req = TSC.RequestOptions() diff --git a/samples/list.py b/samples/list.py index 10e11ac04..8a6407e0d 100644 --- a/samples/list.py +++ b/samples/list.py @@ -5,7 +5,6 @@ #### import argparse -import getpass import logging import os import sys @@ -15,28 +14,26 @@ def main(): parser = argparse.ArgumentParser(description='List out the names and LUIDs for different resource types.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--site', '-S', default="", help='site to log into, do not specify for default site') - parser.add_argument('--token-name', '-n', required=True, help='username to signin under') - parser.add_argument('--token', '-t', help='personal access token for logging in') - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-n', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - + # Options specific to this sample parser.add_argument('resource_type', choices=['workbook', 'datasource', 'project', 'view', 'job', 'webhooks']) args = parser.parse_args() - token = os.environ.get('TOKEN', args.token) - if not token: - print("--token or TOKEN environment variable needs to be set") - sys.exit(1) # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - # SIGN IN - tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, token, site_id=args.site) + # Sign in to server + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): endpoint = { diff --git a/samples/login.py b/samples/login.py index 29e02e14e..eec967e8d 100644 --- a/samples/login.py +++ b/samples/login.py @@ -13,16 +13,17 @@ def main(): parser = argparse.ArgumentParser(description='Logs in to the server.') - + # This command is special, as it doesn't take `token-value` and it offer both token-based and password based authentication. 
+ # Please still try to keep common options like `server` and `site` consistent across samples + # Common options: + parser.add_argument('--server', '-s', required=True, help='server address') + parser.add_argument('--site', '-S', help='site name') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - - parser.add_argument('--server', '-s', required=True, help='server address') - + # Options specific to this sample group = parser.add_mutually_exclusive_group(required=True) group.add_argument('--username', '-u', help='username to sign into the server') group.add_argument('--token-name', '-n', help='name of the personal access token used to sign into the server') - parser.add_argument('--sitename', '-S', default='') args = parser.parse_args() @@ -37,8 +38,8 @@ def main(): # Trying to authenticate using username and password. password = getpass.getpass("Password: ") - print("\nSigning in...\nServer: {}\nSite: {}\nUsername: {}".format(args.server, args.sitename, args.username)) - tableau_auth = TSC.TableauAuth(args.username, password, site_id=args.sitename) + print("\nSigning in...\nServer: {}\nSite: {}\nUsername: {}".format(args.server, args.site, args.username)) + tableau_auth = TSC.TableauAuth(args.username, password, site_id=args.site) with server.auth.sign_in(tableau_auth): print('Logged in successfully') @@ -47,9 +48,9 @@ def main(): personal_access_token = getpass.getpass("Personal Access Token: ") print("\nSigning in...\nServer: {}\nSite: {}\nToken name: {}" - .format(args.server, args.sitename, args.token_name)) + .format(args.server, args.site, args.token_name)) tableau_auth = TSC.PersonalAccessTokenAuth(token_name=args.token_name, - personal_access_token=personal_access_token, site_id=args.sitename) + personal_access_token=personal_access_token, site_id=args.site) with server.auth.sign_in_with_personal_access_token(tableau_auth): print('Logged in successfully') diff --git a/samples/move_workbook_projects.py b/samples/move_workbook_projects.py index c31425f25..62189370c 100644 --- a/samples/move_workbook_projects.py +++ b/samples/move_workbook_projects.py @@ -8,7 +8,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -17,25 +16,28 @@ def main(): parser = argparse.ArgumentParser(description='Move one workbook from the default project to another.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--workbook-name', '-w', required=True, help='name of workbook to move') - parser.add_argument('--destination-project', '-d', required=True, help='name of project to move workbook into') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--workbook-name', '-w', required=True, help='name of workbook to move') + parser.add_argument('--destination-project', '-d', 
required=True, help='name of project to move workbook into') args = parser.parse_args() - password = getpass.getpass("Password: ") - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # Step 1: Sign in to server - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Step 2: Query workbook to move req_option = TSC.RequestOptions() diff --git a/samples/move_workbook_sites.py b/samples/move_workbook_sites.py index 08bde0ec6..8a97031a9 100644 --- a/samples/move_workbook_sites.py +++ b/samples/move_workbook_sites.py @@ -8,7 +8,6 @@ #### import argparse -import getpass import logging import shutil import tempfile @@ -21,23 +20,28 @@ def main(): parser = argparse.ArgumentParser(description="Move one workbook from the" "default project of the default site to" "the default project of another site.") + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--workbook-name', '-w', required=True, help='name of workbook to move') - parser.add_argument('--destination-site', '-d', required=True, help='name of site to move workbook into') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--workbook-name', '-w', required=True, help='name of workbook to move') + parser.add_argument('--destination-site', '-d', required=True, help='name of site to move workbook into') - args = parser.parse_args() - password = getpass.getpass("Password: ") + args = parser.parse_args() # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # Step 1: Sign in to both sites on server - tableau_auth = TSC.TableauAuth(args.username, password) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) source_server = TSC.Server(args.server) dest_server = TSC.Server(args.server) diff --git a/samples/pagination_sample.py b/samples/pagination_sample.py index 6779023ba..2ebd011dc 100644 --- a/samples/pagination_sample.py +++ b/samples/pagination_sample.py @@ -10,7 +10,6 @@ #### import argparse -import getpass import logging import os.path @@ -20,26 +19,28 @@ def main(): parser = argparse.ArgumentParser(description='Demonstrate pagination on the list of workbooks on the server.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') + parser.add_argument('--site', '-S', help='site name') + 
parser.add_argument('--token-name', '-n', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + # This sample has no additional options, yet. If you add some, please add them here args = parser.parse_args() - password = getpass.getpass("Password: ") - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - # SIGN IN - - tableau_auth = TSC.TableauAuth(args.username, password) - server = TSC.Server(args.server) - + # Sign in to server + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): - # Pager returns a generator that yields one item at a time fetching # from Server only when necessary. Pager takes a server Endpoint as its # first parameter. It will call 'get' on that endpoint. To get workbooks diff --git a/samples/publish_datasource.py b/samples/publish_datasource.py index 9c0099ac6..0d7f936c2 100644 --- a/samples/publish_datasource.py +++ b/samples/publish_datasource.py @@ -26,15 +26,17 @@ def main(): parser = argparse.ArgumentParser(description='Publish a datasource to server.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--site', '-i', help='site name') + parser.add_argument('--site', '-S', help='site name') parser.add_argument('--token-name', '-p', required=True, help='name of the personal access token used to sign into the server') parser.add_argument('--token-value', '-v', required=True, help='value of the personal access token used to sign into the server') - parser.add_argument('--filepath', '-f', required=True, help='filepath to the datasource to publish') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--file', '-f', required=True, help='filepath to the datasource to publish') parser.add_argument('--project', help='Project within which to publish the datasource') parser.add_argument('--async', '-a', help='Publishing asynchronously', dest='async_', action='store_true') parser.add_argument('--conn-username', help='connection username') diff --git a/samples/publish_workbook.py b/samples/publish_workbook.py index ca366cf9e..58a158b12 100644 --- a/samples/publish_workbook.py +++ b/samples/publish_workbook.py @@ -15,7 +15,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -25,29 +24,30 @@ def main(): parser = argparse.ArgumentParser(description='Publish a workbook to server.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--filepath', '-f', required=True, help='computer filepath of the workbook to publish') + parser.add_argument('--site', '-S', 
help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('--file', '-f', required=True, help='local filepath of the workbook to publish') parser.add_argument('--as-job', '-a', help='Publishing asynchronously', action='store_true') parser.add_argument('--skip-connection-check', '-c', help='Skip live connection check', action='store_true') - parser.add_argument('--site', '-S', default='', help='id (contentUrl) of site to sign into') - args = parser.parse_args() - password = getpass.getpass("Password: ") + args = parser.parse_args() # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # Step 1: Sign in to server. - tableau_auth = TSC.TableauAuth(args.username, password, site_id=args.site) - server = TSC.Server(args.server) - - overwrite_true = TSC.Server.PublishMode.Overwrite - + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): # Step 2: Get all the projects on server, then look for the default one. @@ -68,6 +68,7 @@ def main(): all_connections.append(connection2) # Step 3: If default project is found, form a new workbook item and publish. + overwrite_true = TSC.Server.PublishMode.Overwrite if default_project is not None: new_workbook = TSC.WorkbookItem(default_project.id) if args.as_job: diff --git a/samples/query_permissions.py b/samples/query_permissions.py index a253adc9a..457d534ec 100644 --- a/samples/query_permissions.py +++ b/samples/query_permissions.py @@ -7,7 +7,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -15,30 +14,27 @@ def main(): parser = argparse.ArgumentParser(description='Query permissions of a given resource.') - parser.add_argument('--server', '-s', required=True, help='Server address') - parser.add_argument('--username', '-u', required=True, help='Username to sign into server') - parser.add_argument('--site', '-S', default=None, help='Site to sign into - default site if not provided') - parser.add_argument('-p', default=None, help='Password to sign into server') - + # Common options; please keep those in sync across all samples + parser.add_argument('--server', '-s', required=True, help='server address') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - + # Options specific to this sample parser.add_argument('resource_type', choices=['workbook', 'datasource', 'flow', 'table', 'database']) parser.add_argument('resource_id') args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: 
- password = args.p - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # Sign in - tableau_auth = TSC.TableauAuth(args.username, password, args.site) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): diff --git a/samples/refresh.py b/samples/refresh.py index 96937a6e3..ec0cdbab4 100644 --- a/samples/refresh.py +++ b/samples/refresh.py @@ -5,7 +5,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -13,30 +12,26 @@ def main(): parser = argparse.ArgumentParser(description='Trigger a refresh task on a workbook or datasource.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--site', '-S', default=None) - parser.add_argument('--password', '-p', default=None, help='if not specified, you will be prompted') - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - + # Options specific to this sample parser.add_argument('resource_type', choices=['workbook', 'datasource']) parser.add_argument('resource_id') args = parser.parse_args() - if args.password is None: - password = getpass.getpass("Password: ") - else: - password = args.password - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - # SIGN IN - tableau_auth = TSC.TableauAuth(args.username, password, args.site) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): if args.resource_type == "workbook": diff --git a/samples/refresh_tasks.py b/samples/refresh_tasks.py index f722adb30..01f574ee4 100644 --- a/samples/refresh_tasks.py +++ b/samples/refresh_tasks.py @@ -6,7 +6,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -30,14 +29,16 @@ def handle_info(server, args): def main(): parser = argparse.ArgumentParser(description='Get all of the refresh tasks available on a server') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--site', '-S', default=None) - parser.add_argument('-p', default=None) - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') 
parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - + # Options specific to this sample subcommands = parser.add_subparsers() list_arguments = subcommands.add_parser('list') @@ -53,19 +54,13 @@ def main(): args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # SIGN IN - tableau_auth = TSC.TableauAuth(args.username, password, args.site) - server = TSC.Server(args.server) - server.version = '2.6' + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): args.func(server, args) diff --git a/samples/set_http_options.py b/samples/set_http_options.py index 9316dfdde..8fad2a10c 100644 --- a/samples/set_http_options.py +++ b/samples/set_http_options.py @@ -6,7 +6,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -15,21 +14,26 @@ def main(): parser = argparse.ArgumentParser(description='List workbooks on site, with option set to ignore SSL verification.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') + # Options specific to this sample + # This sample has no additional options, yet. 
If you add some, please add them here args = parser.parse_args() - password = getpass.getpass("Password: ") - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # Step 1: Create required objects for sign in - tableau_auth = TSC.TableauAuth(args.username, password) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server) # Step 2: Set http options to disable verifying SSL diff --git a/samples/set_refresh_schedule.py b/samples/set_refresh_schedule.py index 2d4761560..37526ccc8 100644 --- a/samples/set_refresh_schedule.py +++ b/samples/set_refresh_schedule.py @@ -7,7 +7,6 @@ import argparse -import getpass import logging import tableauserverclient as TSC @@ -15,11 +14,16 @@ def usage(args): parser = argparse.ArgumentParser(description='Set refresh schedule for a workbook or datasource.') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - parser.add_argument('--password', '-p', default=None) + # Options specific to this sample group = parser.add_mutually_exclusive_group(required=True) group.add_argument('--workbook', '-w') group.add_argument('--datasource', '-d') @@ -61,18 +65,13 @@ def assign_to_schedule(server, workbook_or_datasource, schedule): def run(args): - password = args.password - if password is None: - password = getpass.getpass("Password: ") - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) # Step 1: Sign in to server. 
- tableau_auth = TSC.TableauAuth(args.username, password) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) - with server.auth.sign_in(tableau_auth): if args.workbook: item = get_workbook_by_name(server, args.workbook) diff --git a/samples/update_connection.py b/samples/update_connection.py index 3449441a4..7ac67fd76 100644 --- a/samples/update_connection.py +++ b/samples/update_connection.py @@ -5,7 +5,6 @@ #### import argparse -import getpass import logging import tableauserverclient as TSC @@ -13,14 +12,16 @@ def main(): parser = argparse.ArgumentParser(description='Update a connection on a datasource or workbook to embed credentials') + # Common options; please keep those in sync across all samples parser.add_argument('--server', '-s', required=True, help='server address') - parser.add_argument('--username', '-u', required=True, help='username to sign into server') - parser.add_argument('--site', '-S', default=None) - parser.add_argument('-p', default=None) - + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', help='desired logging level (set to error by default)') - + # Options specific to this sample parser.add_argument('resource_type', choices=['workbook', 'datasource']) parser.add_argument('resource_id') parser.add_argument('connection_id') @@ -29,17 +30,11 @@ def main(): args = parser.parse_args() - if args.p is None: - password = getpass.getpass("Password: ") - else: - password = args.p - # Set logging level based on user input, or error by default logging_level = getattr(logging, args.logging_level.upper()) logging.basicConfig(level=logging_level) - # SIGN IN - tableau_auth = TSC.TableauAuth(args.username, password, args.site) + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) server = TSC.Server(args.server, use_server_version=True) with server.auth.sign_in(tableau_auth): endpoint = { From d043e58151b6ce497bc79e2f5568680903a53d81 Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Thu, 23 Sep 2021 11:32:30 +0200 Subject: [PATCH 19/30] Add example for querying metadata API (#895) --- samples/metadata_query.py | 64 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 samples/metadata_query.py diff --git a/samples/metadata_query.py b/samples/metadata_query.py new file mode 100644 index 000000000..7cd321f0a --- /dev/null +++ b/samples/metadata_query.py @@ -0,0 +1,64 @@ +#### +# This script demonstrates how to use the metadata API to query information on a published data source +# +# To run the script, you must have installed Python 3.5 or later. 
+#### + +import argparse +import logging +from pprint import pprint + +import tableauserverclient as TSC + + +def main(): + parser = argparse.ArgumentParser(description='Use the metadata API to get information on a published data source.') + # Common options; please keep those in sync across all samples + parser.add_argument('--server', '-s', required=True, help='server address') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-n', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') + parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', + help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('datasource_name', nargs='?', help="The name of the published datasource. If not present, we query all data sources.") + + + args = parser.parse_args() + + # Set logging level based on user input, or error by default + logging_level = getattr(logging, args.logging_level.upper()) + logging.basicConfig(level=logging_level) + + # Sign in to server + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) + with server.auth.sign_in(tableau_auth): + # Execute the query + result = server.metadata.query(""" + query useMetadataApiToQueryOrdersDatabases($name: String){ + publishedDatasources (filter: {name: $name}) { + luid + name + description + projectName + fields { + name + } + } + }""", {"name": args.datasource_name}) + + # Display warnings/errors (if any) + if result.get("errors"): + print("### Errors/Warnings:") + pprint(result["errors"]) + + # Print the results + if result.get("data"): + print("### Results:") + pprint(result["data"]["publishedDatasources"]) + +if __name__ == '__main__': + main() From 95bb0cad5a5c88d9ebd02c71cd17f145f8dc9542 Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Sat, 18 Sep 2021 22:07:44 +0200 Subject: [PATCH 20/30] Expose the `fileuploads` API endpoint We had at least two independent re-implementations [1, 2] of file uploads within the last 4 months. And this was despite the fact that both projects already used TSC which would offer this functionality. Currently, the upload functionality in TSC is hard to discover as it is not exposed like all other REST functions. Instead of `server.fileuploads`, one has to first create an instance of the (undocumented) `Fileuploads` class. The upload functionality was probably because it should be usually unnecessary: The uploaded files are usually part of publishing a workbook/datasource/... and the corresponding `datasources.publish` (and similar) already take care of the upload internally. However, TSC isn't always up-to-date with new REST APIs, and by exposing file uploads directly we can make sure to offer the best possible experience to users of TSC also in those transition periods. This commit: * turns the `Fileuploads` class into a normal endpoint class which is not tied to one upload (So far, `Fileuploads` was not stateless. Now it is) * adds the endpoint to `server`, such that file uploads are available as `server.fileuploads` * adjusts all other users to use `server.fileuploads` instead of constructing an ad hoc instance of the `Fileuploads` class Documentation will be added in a separate commit. 
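For illustration only (not part of this patch), a minimal sketch of what the newly exposed endpoint enables; the server address, token values, and file path below are placeholders:

    import tableauserverclient as TSC

    # Sign in with a personal access token (placeholder values).
    tableau_auth = TSC.PersonalAccessTokenAuth('token-name', 'token-value', site_id='site')
    server = TSC.Server('https://tableau.example.com', use_server_version=True)

    with server.auth.sign_in(tableau_auth):
        # Upload the file in chunks; the returned session id can then be passed to
        # any REST endpoint that accepts an uploadSessionId query parameter.
        upload_session_id = server.fileuploads.upload('large_extract.hyper')
        print('Upload session id:', upload_session_id)
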
[1] https://github.com/jharris126/tableau-data-update-api-samples/blob/41f51ae4d220de55caf63e91fe9eff5694b9456a/basic/basic_incremental_load.py#L23 [2] https://github.com/tableau/hyper-api-samples/blob/382e66481ec8339407cf9cfa5d41fcdcf3f6a0fb/Community-Supported/clouddb-extractor/tableau_restapi_helpers.py#L165 --- .../server/endpoint/__init__.py | 1 + .../server/endpoint/datasources_endpoint.py | 3 +- .../server/endpoint/fileuploads_endpoint.py | 36 ++++++++----------- .../server/endpoint/flows_endpoint.py | 3 +- .../server/endpoint/workbooks_endpoint.py | 3 +- tableauserverclient/server/server.py | 2 ++ test/test_fileuploads.py | 15 +++----- 7 files changed, 24 insertions(+), 39 deletions(-) diff --git a/tableauserverclient/server/endpoint/__init__.py b/tableauserverclient/server/endpoint/__init__.py index 8653c0254..29fe93299 100644 --- a/tableauserverclient/server/endpoint/__init__.py +++ b/tableauserverclient/server/endpoint/__init__.py @@ -5,6 +5,7 @@ from .databases_endpoint import Databases from .endpoint import Endpoint from .favorites_endpoint import Favorites +from .fileuploads_endpoint import Fileuploads from .flows_endpoint import Flows from .exceptions import ( ServerResponseError, diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py index 7b80c2b2b..b67332f7d 100644 --- a/tableauserverclient/server/endpoint/datasources_endpoint.py +++ b/tableauserverclient/server/endpoint/datasources_endpoint.py @@ -2,7 +2,6 @@ from .exceptions import InternalServerError, MissingRequiredFieldError from .permissions_endpoint import _PermissionsEndpoint from .dqw_endpoint import _DataQualityWarningEndpoint -from .fileuploads_endpoint import Fileuploads from .resource_tagger import _ResourceTagger from .. 
import RequestFactory, DatasourceItem, PaginationItem, ConnectionItem from ..query import QuerySet @@ -244,7 +243,7 @@ def publish( # Determine if chunking is required (64MB is the limit for single upload method) if file_size >= FILESIZE_LIMIT: logger.info("Publishing {0} to server with chunking method (datasource over 64MB)".format(filename)) - upload_session_id = Fileuploads.upload_chunks(self.parent_srv, file) + upload_session_id = self.parent_srv.fileuploads.upload(file) url = "{0}&uploadSessionId={1}".format(url, upload_session_id) xml_request, content_type = RequestFactory.Datasource.publish_req_chunked( datasource_item, connection_credentials, connections diff --git a/tableauserverclient/server/endpoint/fileuploads_endpoint.py b/tableauserverclient/server/endpoint/fileuploads_endpoint.py index 05a3ce17c..046406c16 100644 --- a/tableauserverclient/server/endpoint/fileuploads_endpoint.py +++ b/tableauserverclient/server/endpoint/fileuploads_endpoint.py @@ -14,7 +14,6 @@ class Fileuploads(Endpoint): def __init__(self, parent_srv): super(Fileuploads, self).__init__(parent_srv) - self.upload_id = "" @property def baseurl(self): @@ -25,21 +24,18 @@ def initiate(self): url = self.baseurl server_response = self.post_request(url, "") fileupload_item = FileuploadItem.from_response(server_response.content, self.parent_srv.namespace) - self.upload_id = fileupload_item.upload_session_id - logger.info("Initiated file upload session (ID: {0})".format(self.upload_id)) - return self.upload_id + upload_id = fileupload_item.upload_session_id + logger.info("Initiated file upload session (ID: {0})".format(upload_id)) + return upload_id @api(version="2.0") - def append(self, xml_request, content_type): - if not self.upload_id: - error = "File upload session must be initiated first." 
- raise MissingRequiredFieldError(error) - url = "{0}/{1}".format(self.baseurl, self.upload_id) - server_response = self.put_request(url, xml_request, content_type) - logger.info("Uploading a chunk to session (ID: {0})".format(self.upload_id)) + def append(self, upload_id, data, content_type): + url = "{0}/{1}".format(self.baseurl, upload_id) + server_response = self.put_request(url, data, content_type) + logger.info("Uploading a chunk to session (ID: {0})".format(upload_id)) return FileuploadItem.from_response(server_response.content, self.parent_srv.namespace) - def read_chunks(self, file): + def _read_chunks(self, file): file_opened = False try: file_content = open(file, "rb") @@ -55,15 +51,11 @@ def read_chunks(self, file): break yield chunked_content - @classmethod - def upload_chunks(cls, parent_srv, file): - file_uploader = cls(parent_srv) - upload_id = file_uploader.initiate() - - chunks = file_uploader.read_chunks(file) - for chunk in chunks: - xml_request, content_type = RequestFactory.Fileupload.chunk_req(chunk) - fileupload_item = file_uploader.append(xml_request, content_type) + def upload(self, file): + upload_id = self.initiate() + for chunk in self._read_chunks(file): + request, content_type = RequestFactory.Fileupload.chunk_req(chunk) + fileupload_item = self.append(upload_id, request, content_type) logger.info("\tPublished {0}MB".format(fileupload_item.file_size)) - logger.info("\tCommitting file upload...") + logger.info("File upload finished (ID: {0})".format(upload_id)) return upload_id diff --git a/tableauserverclient/server/endpoint/flows_endpoint.py b/tableauserverclient/server/endpoint/flows_endpoint.py index 475166aad..eb2de4ac9 100644 --- a/tableauserverclient/server/endpoint/flows_endpoint.py +++ b/tableauserverclient/server/endpoint/flows_endpoint.py @@ -2,7 +2,6 @@ from .exceptions import InternalServerError, MissingRequiredFieldError from .permissions_endpoint import _PermissionsEndpoint from .dqw_endpoint import _DataQualityWarningEndpoint -from .fileuploads_endpoint import Fileuploads from .resource_tagger import _ResourceTagger from .. import RequestFactory, FlowItem, PaginationItem, ConnectionItem from ...filesys_helpers import to_filename, make_download_path @@ -169,7 +168,7 @@ def publish(self, flow_item, file_path, mode, connections=None): # Determine if chunking is required (64MB is the limit for single upload method) if os.path.getsize(file_path) >= FILESIZE_LIMIT: logger.info("Publishing {0} to server with chunking method (flow over 64MB)".format(filename)) - upload_session_id = Fileuploads.upload_chunks(self.parent_srv, file_path) + upload_session_id = self.parent_srv.fileuploads.upload(file_path) url = "{0}&uploadSessionId={1}".format(url, upload_session_id) xml_request, content_type = RequestFactory.Flow.publish_req_chunked(flow_item, connections) else: diff --git a/tableauserverclient/server/endpoint/workbooks_endpoint.py b/tableauserverclient/server/endpoint/workbooks_endpoint.py index df14674c6..a3f14c291 100644 --- a/tableauserverclient/server/endpoint/workbooks_endpoint.py +++ b/tableauserverclient/server/endpoint/workbooks_endpoint.py @@ -1,7 +1,6 @@ from .endpoint import QuerysetEndpoint, api, parameter_added_in from .exceptions import InternalServerError, MissingRequiredFieldError from .permissions_endpoint import _PermissionsEndpoint -from .fileuploads_endpoint import Fileuploads from .resource_tagger import _ResourceTagger from .. 
import RequestFactory, WorkbookItem, ConnectionItem, ViewItem, PaginationItem from ...models.job_item import JobItem @@ -344,7 +343,7 @@ def publish( # Determine if chunking is required (64MB is the limit for single upload method) if file_size >= FILESIZE_LIMIT: logger.info("Publishing {0} to server with chunking method (workbook over 64MB)".format(workbook_item.name)) - upload_session_id = Fileuploads.upload_chunks(self.parent_srv, file) + upload_session_id = self.parent_srv.fileuploads.upload(file) url = "{0}&uploadSessionId={1}".format(url, upload_session_id) conn_creds = connection_credentials xml_request, content_type = RequestFactory.Workbook.publish_req_chunked( diff --git a/tableauserverclient/server/server.py b/tableauserverclient/server/server.py index 057c98877..a20694a92 100644 --- a/tableauserverclient/server/server.py +++ b/tableauserverclient/server/server.py @@ -24,6 +24,7 @@ DataAccelerationReport, Favorites, DataAlerts, + Fileuploads, ) from .endpoint.exceptions import ( EndpointUnavailableError, @@ -82,6 +83,7 @@ def __init__(self, server_address, use_server_version=False): self.webhooks = Webhooks(self) self.data_acceleration_report = DataAccelerationReport(self) self.data_alerts = DataAlerts(self) + self.fileuploads = Fileuploads(self) self._namespace = Namespace() if use_server_version: diff --git a/test/test_fileuploads.py b/test/test_fileuploads.py index 9d115636f..51662e4a2 100644 --- a/test/test_fileuploads.py +++ b/test/test_fileuploads.py @@ -4,7 +4,6 @@ from ._utils import asset from tableauserverclient.server import Server -from tableauserverclient.server.endpoint.fileuploads_endpoint import Fileuploads TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets') FILEUPLOAD_INITIALIZE = os.path.join(TEST_ASSET_DIR, 'fileupload_initialize.xml') @@ -22,23 +21,18 @@ def setUp(self): self.baseurl = '{}/sites/{}/fileUploads'.format(self.server.baseurl, self.server.site_id) def test_read_chunks_file_path(self): - fileuploads = Fileuploads(self.server) - file_path = asset('SampleWB.twbx') - chunks = fileuploads.read_chunks(file_path) + chunks = self.server.fileuploads._read_chunks(file_path) for chunk in chunks: self.assertIsNotNone(chunk) def test_read_chunks_file_object(self): - fileuploads = Fileuploads(self.server) - with open(asset('SampleWB.twbx'), 'rb') as f: - chunks = fileuploads.read_chunks(f) + chunks = self.server.fileuploads._read_chunks(f) for chunk in chunks: self.assertIsNotNone(chunk) def test_upload_chunks_file_path(self): - fileuploads = Fileuploads(self.server) file_path = asset('SampleWB.twbx') upload_id = '7720:170fe6b1c1c7422dadff20f944d58a52-1:0' @@ -49,12 +43,11 @@ def test_upload_chunks_file_path(self): with requests_mock.mock() as m: m.post(self.baseurl, text=initialize_response_xml) m.put(self.baseurl + '/' + upload_id, text=append_response_xml) - actual = fileuploads.upload_chunks(self.server, file_path) + actual = self.server.fileuploads.upload(file_path) self.assertEqual(upload_id, actual) def test_upload_chunks_file_object(self): - fileuploads = Fileuploads(self.server) upload_id = '7720:170fe6b1c1c7422dadff20f944d58a52-1:0' with open(asset('SampleWB.twbx'), 'rb') as file_content: @@ -65,6 +58,6 @@ def test_upload_chunks_file_object(self): with requests_mock.mock() as m: m.post(self.baseurl, text=initialize_response_xml) m.put(self.baseurl + '/' + upload_id, text=append_response_xml) - actual = fileuploads.upload_chunks(self.server, file_content) + actual = self.server.fileuploads.upload(file_content) self.assertEqual(upload_id, 
actual) From 3abe4e94def4bc56fb188e04aa1e51fb71fcae1b Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Thu, 23 Sep 2021 13:17:55 +0200 Subject: [PATCH 21/30] Make `Fileuploads._read_chunks` exception-safe --- .../server/endpoint/fileuploads_endpoint.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/tableauserverclient/server/endpoint/fileuploads_endpoint.py b/tableauserverclient/server/endpoint/fileuploads_endpoint.py index 046406c16..b70cffbaa 100644 --- a/tableauserverclient/server/endpoint/fileuploads_endpoint.py +++ b/tableauserverclient/server/endpoint/fileuploads_endpoint.py @@ -43,13 +43,15 @@ def _read_chunks(self, file): except TypeError: file_content = file - while True: - chunked_content = file_content.read(CHUNK_SIZE) - if not chunked_content: - if file_opened: - file_content.close() - break - yield chunked_content + try: + while True: + chunked_content = file_content.read(CHUNK_SIZE) + if not chunked_content: + break + yield chunked_content + finally: + if file_opened: + file_content.close() def upload(self, file): upload_id = self.initiate() From 7c03396b7b52ab40c60789f87db04998314812eb Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Wed, 29 Sep 2021 09:18:59 +0200 Subject: [PATCH 22/30] Add support for scheduling Data Update jobs (#891) This commit adds support for the `datasources//data` endpoint through which one can schedule jobs to update the data within a published live-to-Hyper datasource on the server. The new `datasources.update_data` expects the arguments: * a datasource or a connection: If the datasource only contains a single connections, the datasource is sufficient to identify which Hyper file should be updated. Otherwise, for datasources with multiple connections, the connections has to be provided. This distinction happens on the server, so the client library only needs to provide a way to specify either of both. * a `request_id` which will be used to ensure idempotency on the server. This parameter is simply passed as a HTTP header . * an `actions` list, specifying how exactly the data on the server should be modified. We expect the caller to provide list following the structure documented in the REST API documentation. TSC does not validate this object and simply passes it through to the server. * an optional `payload` file: For actions like `insert`, one can provide a Hyper file which contains the newly inserted tuples or other payload data. TSC will upload this file to the server and then hand it over to the update-API endpoint. Besides the addition of the `datasources.update_data` itself, this commit also adds some infrastructure changes, e.g., to enable sending PATCH requests and HTTP headers. --- samples/update_datasource_data.py | 74 +++++++++++++++++ .../server/endpoint/datasources_endpoint.py | 29 +++++++ .../server/endpoint/endpoint.py | 17 +++- test/assets/datasource_data_update.xml | 9 ++ test/test_datasource.py | 82 +++++++++++++++++++ 5 files changed, 209 insertions(+), 2 deletions(-) create mode 100644 samples/update_datasource_data.py create mode 100644 test/assets/datasource_data_update.xml diff --git a/samples/update_datasource_data.py b/samples/update_datasource_data.py new file mode 100644 index 000000000..9465ae9ee --- /dev/null +++ b/samples/update_datasource_data.py @@ -0,0 +1,74 @@ +#### +# This script demonstrates how to update the data within a published +# live-to-Hyper datasource on server. 
+# +# The sample is hardcoded against the `World Indicators` dataset and +# expects to receive the LUID of a published datasource containing +# that data. To create such a published datasource, you can use: +# ./publish_datasource.py --file ../test/assets/World\ Indicators.hyper +# which will print you the LUID of the datasource. +# +# Before running this script, the datasource will contain a region `Europe`. +# After running this script, that region will be gone. +# +#### + +import argparse +import uuid +import logging + +import tableauserverclient as TSC + + +def main(): + parser = argparse.ArgumentParser(description='Delete the `Europe` region from a published `World Indicators` datasource.') + # Common options; please keep those in sync across all samples + parser.add_argument('--server', '-s', required=True, help='server address') + parser.add_argument('--site', '-S', help='site name') + parser.add_argument('--token-name', '-p', required=True, + help='name of the personal access token used to sign into the server') + parser.add_argument('--token-value', '-v', required=True, + help='value of the personal access token used to sign into the server') + parser.add_argument('--logging-level', '-l', choices=['debug', 'info', 'error'], default='error', + help='desired logging level (set to error by default)') + # Options specific to this sample + parser.add_argument('datasource_id', help="The LUID of the `World Indicators` datasource") + + args = parser.parse_args() + + # Set logging level based on user input, or error by default + logging_level = getattr(logging, args.logging_level.upper()) + logging.basicConfig(level=logging_level) + + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=True) + with server.auth.sign_in(tableau_auth): + # We use a unique `request_id` for every request. + # In case the submission of the update job fails, we won't know wether the job was submitted + # or not. It could be that the server received the request, changed the data, but then the + # network connection broke down. + # If you want to have a way to retry, e.g., inserts while making sure they aren't duplicated, + # you need to use `request_id` for that purpose. + # In our case, we don't care about retries. And the delete is idempotent anyway. + # Hence, we simply use a randomly generated request id. + request_id = str(uuid.uuid4()) + + # This action will delete all rows with `Region=Europe` from the published data source. + # Other actions (inserts, updates, ...) are also available. 
For more information see + # https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_how_to_update_data_to_hyper.htm + actions = [ + { + "action": "delete", + "target-table": "Extract", + "target-schema": "Extract", + "condition": {"op": "eq", "target-col": "Region", "const": {"type": "string", "v": "Europe"}} + } + ] + + job = server.datasources.update_data(args.datasource_id, request_id=request_id, actions=actions) + + # TODO: Add a flag that will poll and wait for the returned job to be done + print(job) + +if __name__ == '__main__': + main() diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py index b67332f7d..997921312 100644 --- a/tableauserverclient/server/endpoint/datasources_endpoint.py +++ b/tableauserverclient/server/endpoint/datasources_endpoint.py @@ -18,6 +18,7 @@ import copy import cgi from contextlib import closing +import json # The maximum size of a file that can be published in a single request is 64MB FILESIZE_LIMIT = 1024 * 1024 * 64 # 64MB @@ -282,6 +283,34 @@ def publish( logger.info("Published {0} (ID: {1})".format(filename, new_datasource.id)) return new_datasource + @api(version="3.13") + def update_data(self, datasource_or_connection_item, *, request_id, actions, payload = None): + if isinstance(datasource_or_connection_item, DatasourceItem): + datasource_id = datasource_or_connection_item.id + url = "{0}/{1}/data".format(self.baseurl, datasource_id) + elif isinstance(datasource_or_connection_item, ConnectionItem): + datasource_id = datasource_or_connection_item.datasource_id + connection_id = datasource_or_connection_item.id + url = "{0}/{1}/connections/{2}/data".format(self.baseurl, datasource_id, connection_id) + else: + assert isinstance(datasource_or_connection_item, str) + url = "{0}/{1}/data".format(self.baseurl, datasource_or_connection_item) + + if payload is not None: + if not os.path.isfile(payload): + error = "File path does not lead to an existing file." 
+ raise IOError(error) + + logger.info("Uploading {0} to server with chunking method for Update job".format(payload)) + upload_session_id = self.parent_srv.fileuploads.upload(payload) + url = "{0}?uploadSessionId={1}".format(url, upload_session_id) + + json_request = json.dumps({"actions": actions}) + parameters = {"headers": {"requestid": request_id}} + server_response = self.patch_request(url, json_request, "application/json", parameters=parameters) + new_job = JobItem.from_response(server_response.content, self.parent_srv.namespace)[0] + return new_job + @api(version="2.0") def populate_permissions(self, item): self._permissions.populate(item) diff --git a/tableauserverclient/server/endpoint/endpoint.py b/tableauserverclient/server/endpoint/endpoint.py index f7d88b0e6..31291abc9 100644 --- a/tableauserverclient/server/endpoint/endpoint.py +++ b/tableauserverclient/server/endpoint/endpoint.py @@ -55,7 +55,9 @@ def _make_request( ): parameters = parameters or {} parameters.update(self.parent_srv.http_options) - parameters["headers"] = Endpoint._make_common_headers(auth_token, content_type) + if not "headers" in parameters: + parameters["headers"] = {} + parameters["headers"].update(Endpoint._make_common_headers(auth_token, content_type)) if content is not None: parameters["data"] = content @@ -118,13 +120,14 @@ def delete_request(self, url): # We don't return anything for a delete self._make_request(self.parent_srv.session.delete, url, auth_token=self.parent_srv.auth_token) - def put_request(self, url, xml_request=None, content_type="text/xml"): + def put_request(self, url, xml_request=None, content_type="text/xml", parameters=None): return self._make_request( self.parent_srv.session.put, url, content=xml_request, auth_token=self.parent_srv.auth_token, content_type=content_type, + parameters=parameters, ) def post_request(self, url, xml_request, content_type="text/xml", parameters=None): @@ -137,6 +140,16 @@ def post_request(self, url, xml_request, content_type="text/xml", parameters=Non parameters=parameters, ) + def patch_request(self, url, xml_request, content_type="text/xml", parameters=None): + return self._make_request( + self.parent_srv.session.patch, + url, + content=xml_request, + auth_token=self.parent_srv.auth_token, + content_type=content_type, + parameters=parameters, + ) + def api(version): """Annotate the minimum supported version for an endpoint. 
diff --git a/test/assets/datasource_data_update.xml b/test/assets/datasource_data_update.xml new file mode 100644 index 000000000..305caaf0b --- /dev/null +++ b/test/assets/datasource_data_update.xml @@ -0,0 +1,9 @@ + + + + + + 7ecaccd8-39b0-4875-a77d-094f6e930019 + + + diff --git a/test/test_datasource.py b/test/test_datasource.py index 42d1dfade..e4ef01a29 100644 --- a/test/test_datasource.py +++ b/test/test_datasource.py @@ -1,3 +1,4 @@ +from tableauserverclient.server.endpoint.fileuploads_endpoint import Fileuploads import unittest from io import BytesIO import os @@ -22,6 +23,7 @@ PUBLISH_XML_ASYNC = 'datasource_publish_async.xml' REFRESH_XML = 'datasource_refresh.xml' UPDATE_XML = 'datasource_update.xml' +UPDATE_DATA_XML = 'datasource_data_update.xml' UPDATE_CONNECTION_XML = 'datasource_connection_update.xml' @@ -355,6 +357,86 @@ def test_refresh_object(self): # We only check the `id`; remaining fields are already tested in `test_refresh_id` self.assertEqual('7c3d599e-949f-44c3-94a1-f30ba85757e4', new_job.id) + def test_update_data_datasource_object(self): + """Calling `update_data` with a `DatasourceItem` should update that datasource""" + self.server.version = "3.13" + self.baseurl = self.server.datasources.baseurl + + datasource = TSC.DatasourceItem('') + datasource._id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb' + response_xml = read_xml_asset(UPDATE_DATA_XML) + with requests_mock.mock() as m: + m.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/data', + status_code=202, headers={"requestid": "test_id"}, text=response_xml) + new_job = self.server.datasources.update_data(datasource, request_id="test_id", actions=[]) + + self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id) + self.assertEqual('UpdateUploadedFile', new_job.type) + self.assertEqual(None, new_job.progress) + self.assertEqual('2021-09-18T09:40:12Z', format_datetime(new_job.created_at)) + self.assertEqual(-1, new_job.finish_code) + + def test_update_data_connection_object(self): + """Calling `update_data` with a `ConnectionItem` should update that connection""" + self.server.version = "3.13" + self.baseurl = self.server.datasources.baseurl + + connection = TSC.ConnectionItem() + connection._datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb' + connection._id = '7ecaccd8-39b0-4875-a77d-094f6e930019' + response_xml = read_xml_asset(UPDATE_DATA_XML) + with requests_mock.mock() as m: + m.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/connections/7ecaccd8-39b0-4875-a77d-094f6e930019/data', + status_code=202, headers={"requestid": "test_id"}, text=response_xml) + new_job = self.server.datasources.update_data(connection, request_id="test_id", actions=[]) + + # We only check the `id`; remaining fields are already tested in `test_update_data_datasource_object` + self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id) + + def test_update_data_datasource_string(self): + """For convenience, calling `update_data` with a `str` should update the datasource with the corresponding UUID""" + self.server.version = "3.13" + self.baseurl = self.server.datasources.baseurl + + datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb' + response_xml = read_xml_asset(UPDATE_DATA_XML) + with requests_mock.mock() as m: + m.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/data', + status_code=202, headers={"requestid": "test_id"}, text=response_xml) + new_job = self.server.datasources.update_data(datasource_id, request_id="test_id", actions=[]) + + # We only check the `id`; remaining 
fields are already tested in `test_update_data_datasource_object` + self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id) + + def test_update_data_datasource_payload_file(self): + """If `payload` is present, we upload it and associate the job with it""" + self.server.version = "3.13" + self.baseurl = self.server.datasources.baseurl + + datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb' + mock_upload_id = '10051:c3e56879876842d4b3600f20c1f79876-0:0' + response_xml = read_xml_asset(UPDATE_DATA_XML) + with requests_mock.mock() as rm, \ + unittest.mock.patch.object(Fileuploads, "upload", return_value=mock_upload_id): + rm.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/data?uploadSessionId=' + mock_upload_id, + status_code=202, headers={"requestid": "test_id"}, text=response_xml) + new_job = self.server.datasources.update_data(datasource_id, request_id="test_id", + actions=[], payload=asset('World Indicators.hyper')) + + # We only check the `id`; remaining fields are already tested in `test_update_data_datasource_object` + self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id) + + def test_update_data_datasource_invalid_payload_file(self): + """If `payload` points to a non-existing file, we report an error""" + self.server.version = "3.13" + self.baseurl = self.server.datasources.baseurl + datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb' + with self.assertRaises(IOError) as cm: + self.server.datasources.update_data(datasource_id, request_id="test_id", + actions=[], payload='no/such/file.missing') + exception = cm.exception + self.assertEqual(str(exception), "File path does not lead to an existing file.") + def test_delete(self): with requests_mock.mock() as m: m.delete(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb', status_code=204) From 9ac17e4deb513fe85518e88ece88e781ae0c79ca Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Mon, 4 Oct 2021 13:24:48 +0200 Subject: [PATCH 23/30] Rename `datasource.update_data` to `datasource.update_hyper_data` (#906) As suggested by @dzucker-tab in #893 --- samples/update_datasource_data.py | 2 +- .../server/endpoint/datasources_endpoint.py | 2 +- test/test_datasource.py | 42 +++++++++---------- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/samples/update_datasource_data.py b/samples/update_datasource_data.py index 9465ae9ee..3633ebaf6 100644 --- a/samples/update_datasource_data.py +++ b/samples/update_datasource_data.py @@ -65,7 +65,7 @@ def main(): } ] - job = server.datasources.update_data(args.datasource_id, request_id=request_id, actions=actions) + job = server.datasources.update_hyper_data(args.datasource_id, request_id=request_id, actions=actions) # TODO: Add a flag that will poll and wait for the returned job to be done print(job) diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py index 997921312..c031004e0 100644 --- a/tableauserverclient/server/endpoint/datasources_endpoint.py +++ b/tableauserverclient/server/endpoint/datasources_endpoint.py @@ -284,7 +284,7 @@ def publish( return new_datasource @api(version="3.13") - def update_data(self, datasource_or_connection_item, *, request_id, actions, payload = None): + def update_hyper_data(self, datasource_or_connection_item, *, request_id, actions, payload = None): if isinstance(datasource_or_connection_item, DatasourceItem): datasource_id = datasource_or_connection_item.id url = "{0}/{1}/data".format(self.baseurl, datasource_id) diff 
--git a/test/test_datasource.py b/test/test_datasource.py index e4ef01a29..68d6d1384 100644 --- a/test/test_datasource.py +++ b/test/test_datasource.py @@ -23,7 +23,7 @@ PUBLISH_XML_ASYNC = 'datasource_publish_async.xml' REFRESH_XML = 'datasource_refresh.xml' UPDATE_XML = 'datasource_update.xml' -UPDATE_DATA_XML = 'datasource_data_update.xml' +UPDATE_HYPER_DATA_XML = 'datasource_data_update.xml' UPDATE_CONNECTION_XML = 'datasource_connection_update.xml' @@ -357,18 +357,18 @@ def test_refresh_object(self): # We only check the `id`; remaining fields are already tested in `test_refresh_id` self.assertEqual('7c3d599e-949f-44c3-94a1-f30ba85757e4', new_job.id) - def test_update_data_datasource_object(self): - """Calling `update_data` with a `DatasourceItem` should update that datasource""" + def test_update_hyper_data_datasource_object(self): + """Calling `update_hyper_data` with a `DatasourceItem` should update that datasource""" self.server.version = "3.13" self.baseurl = self.server.datasources.baseurl datasource = TSC.DatasourceItem('') datasource._id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb' - response_xml = read_xml_asset(UPDATE_DATA_XML) + response_xml = read_xml_asset(UPDATE_HYPER_DATA_XML) with requests_mock.mock() as m: m.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/data', status_code=202, headers={"requestid": "test_id"}, text=response_xml) - new_job = self.server.datasources.update_data(datasource, request_id="test_id", actions=[]) + new_job = self.server.datasources.update_hyper_data(datasource, request_id="test_id", actions=[]) self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id) self.assertEqual('UpdateUploadedFile', new_job.type) @@ -376,63 +376,63 @@ def test_update_data_datasource_object(self): self.assertEqual('2021-09-18T09:40:12Z', format_datetime(new_job.created_at)) self.assertEqual(-1, new_job.finish_code) - def test_update_data_connection_object(self): - """Calling `update_data` with a `ConnectionItem` should update that connection""" + def test_update_hyper_data_connection_object(self): + """Calling `update_hyper_data` with a `ConnectionItem` should update that connection""" self.server.version = "3.13" self.baseurl = self.server.datasources.baseurl connection = TSC.ConnectionItem() connection._datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb' connection._id = '7ecaccd8-39b0-4875-a77d-094f6e930019' - response_xml = read_xml_asset(UPDATE_DATA_XML) + response_xml = read_xml_asset(UPDATE_HYPER_DATA_XML) with requests_mock.mock() as m: m.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/connections/7ecaccd8-39b0-4875-a77d-094f6e930019/data', status_code=202, headers={"requestid": "test_id"}, text=response_xml) - new_job = self.server.datasources.update_data(connection, request_id="test_id", actions=[]) + new_job = self.server.datasources.update_hyper_data(connection, request_id="test_id", actions=[]) - # We only check the `id`; remaining fields are already tested in `test_update_data_datasource_object` + # We only check the `id`; remaining fields are already tested in `test_update_hyper_data_datasource_object` self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id) - def test_update_data_datasource_string(self): - """For convenience, calling `update_data` with a `str` should update the datasource with the corresponding UUID""" + def test_update_hyper_data_datasource_string(self): + """For convenience, calling `update_hyper_data` with a `str` should update the datasource with the corresponding UUID""" 
self.server.version = "3.13"
self.baseurl = self.server.datasources.baseurl
datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb'
- response_xml = read_xml_asset(UPDATE_DATA_XML)
+ response_xml = read_xml_asset(UPDATE_HYPER_DATA_XML)
with requests_mock.mock() as m:
m.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/data', status_code=202, headers={"requestid": "test_id"}, text=response_xml)
- new_job = self.server.datasources.update_data(datasource_id, request_id="test_id", actions=[])
+ new_job = self.server.datasources.update_hyper_data(datasource_id, request_id="test_id", actions=[])
- # We only check the `id`; remaining fields are already tested in `test_update_data_datasource_object`
+ # We only check the `id`; remaining fields are already tested in `test_update_hyper_data_datasource_object`
self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id)
- def test_update_data_datasource_payload_file(self):
+ def test_update_hyper_data_datasource_payload_file(self):
"""If `payload` is present, we upload it and associate the job with it"""
self.server.version = "3.13"
self.baseurl = self.server.datasources.baseurl
datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb'
mock_upload_id = '10051:c3e56879876842d4b3600f20c1f79876-0:0'
- response_xml = read_xml_asset(UPDATE_DATA_XML)
+ response_xml = read_xml_asset(UPDATE_HYPER_DATA_XML)
with requests_mock.mock() as rm, \
unittest.mock.patch.object(Fileuploads, "upload", return_value=mock_upload_id):
rm.patch(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/data?uploadSessionId=' + mock_upload_id, status_code=202, headers={"requestid": "test_id"}, text=response_xml)
- new_job = self.server.datasources.update_data(datasource_id, request_id="test_id",
+ new_job = self.server.datasources.update_hyper_data(datasource_id, request_id="test_id",
actions=[], payload=asset('World Indicators.hyper'))
- # We only check the `id`; remaining fields are already tested in `test_update_data_datasource_object`
+ # We only check the `id`; remaining fields are already tested in `test_update_hyper_data_datasource_object`
self.assertEqual('5c0ba560-c959-424e-b08a-f32ef0bfb737', new_job.id)
- def test_update_data_datasource_invalid_payload_file(self):
+ def test_update_hyper_data_datasource_invalid_payload_file(self):
"""If `payload` points to a non-existing file, we report an error"""
self.server.version = "3.13"
self.baseurl = self.server.datasources.baseurl
datasource_id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb'
with self.assertRaises(IOError) as cm:
- self.server.datasources.update_data(datasource_id, request_id="test_id",
+ self.server.datasources.update_hyper_data(datasource_id, request_id="test_id",
actions=[], payload='no/such/file.missing')
exception = cm.exception
self.assertEqual(str(exception), "File path does not lead to an existing file.")

From 9ccc7133fa73de9f4c607afc035c5dc7f261985f Mon Sep 17 00:00:00 2001
From: Adrian Vogelsgesang
Date: Thu, 7 Oct 2021 00:51:02 +0200
Subject: [PATCH 24/30] Add `jobs.wait_for_job` method (#903)

This commit adds a `wait_for_job` method which will repeatedly poll a job's status until that job is finished. Internally, it uses an exponential backoff for the polling intervals. That way, it is snappy for fast-running jobs without putting too much load on the server for long-running jobs.

It returns the successfully finished `JobItem` object which might be of interest to the caller, e.g. to inspect the reported `started_at` and `completed_at` times or the `notes`.
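For illustration, a minimal usage sketch (assuming `server` is an already signed-in TSC.Server and `datasource` is a DatasourceItem fetched earlier; both names are placeholders):

    from tableauserverclient.server.endpoint.exceptions import JobFailedException

    job = server.datasources.refresh(datasource)  # returns a JobItem immediately
    try:
        # poll with exponential backoff; raises TimeoutError if the job is not done in time
        job = server.jobs.wait_for_job(job, timeout=600)
        print(job.started_at, job.completed_at, job.notes)
    except JobFailedException as error:
        # the failed JobItem is still available on the exception
        print(error.job.id, error.notes)
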
For failed jobs, `wait_for_job` raises an exception. That way, we ensure that errors in jobs don't accidentally go unnoticed. The `job` object can still be retrieved from the exception object, if required.
---
 samples/refresh.py | 13 ++--
 samples/update_datasource_data.py | 8 ++-
 tableauserverclient/exponential_backoff.py | 30 +++++++++
 tableauserverclient/models/job_item.py | 12 +++-
 .../server/endpoint/exceptions.py | 13 ++++
 .../server/endpoint/jobs_endpoint.py | 27 +++++++-
 test/_utils.py | 18 ++++++
 test/test_datasource.py | 2 +-
 test/test_exponential_backoff.py | 62 +++++++++++++++++++
 test/test_job.py | 41 ++++++++++--
 test/test_workbook.py | 2 +-
 11 files changed, 213 insertions(+), 15 deletions(-)
 create mode 100644 tableauserverclient/exponential_backoff.py
 create mode 100644 test/test_exponential_backoff.py

diff --git a/samples/refresh.py b/samples/refresh.py
index ec0cdbab4..7b2618b6e 100644
--- a/samples/refresh.py
+++ b/samples/refresh.py
@@ -39,16 +39,19 @@ def main():
resource = server.workbooks.get_by_id(args.resource_id)
# trigger the refresh, you'll get a job id back which can be used to poll for when the refresh is done
- results = server.workbooks.refresh(args.resource_id)
+ job = server.workbooks.refresh(args.resource_id)
else:
# Get the datasource by its Id to make sure it exists
resource = server.datasources.get_by_id(args.resource_id)
# trigger the refresh, you'll get a job id back which can be used to poll for when the refresh is done
- results = server.datasources.refresh(resource)
-
- print(results)
- # TODO: Add a flag that will poll and wait for the returned job to be done
+ job = server.datasources.refresh(resource)
+
+ print(f"Update job posted (ID: {job.id})")
+ print("Waiting for job...")
+ # `wait_for_job` will throw if the job isn't executed successfully
+ job = server.jobs.wait_for_job(job)
+ print("Job finished successfully")
if __name__ == '__main__':
diff --git a/samples/update_datasource_data.py b/samples/update_datasource_data.py
index 3633ebaf6..74c8ea6fb 100644
--- a/samples/update_datasource_data.py
+++ b/samples/update_datasource_data.py
@@ -67,8 +67,12 @@ def main():
job = server.datasources.update_hyper_data(args.datasource_id, request_id=request_id, actions=actions)
- # TODO: Add a flag that will poll and wait for the returned job to be done
- print(job)
+ print(f"Update job posted (ID: {job.id})")
+ print("Waiting for job...")
+ # `wait_for_job` will throw if the job isn't executed successfully
+ job = server.jobs.wait_for_job(job)
+ print("Job finished successfully")
+
+
if __name__ == '__main__':
main()
diff --git a/tableauserverclient/exponential_backoff.py b/tableauserverclient/exponential_backoff.py
new file mode 100644
index 000000000..2b3ded109
--- /dev/null
+++ b/tableauserverclient/exponential_backoff.py
@@ -0,0 +1,30 @@
+import time
+
+# Polling for server-side events (such as job completion) uses exponential backoff for the sleep intervals between polls
+ASYNC_POLL_MIN_INTERVAL=0.5
+ASYNC_POLL_MAX_INTERVAL=30
+ASYNC_POLL_BACKOFF_FACTOR=1.4
+
+
+class ExponentialBackoffTimer():
+ def __init__(self, *, timeout=None):
+ self.start_time = time.time()
+ self.timeout = timeout
+ self.current_sleep_interval = ASYNC_POLL_MIN_INTERVAL
+
+ def sleep(self):
+ max_sleep_time = ASYNC_POLL_MAX_INTERVAL
+ if self.timeout is not None:
+ elapsed = (time.time() - self.start_time)
+ if elapsed >= self.timeout:
+ raise TimeoutError(f"Timeout after {elapsed} seconds waiting for asynchronous event")
+ remaining_time = self.timeout - elapsed
+ # Usually,
we would sleep for `ASYNC_POLL_MAX_INTERVAL`, but we don't want to sleep over the timeout + max_sleep_time = min(ASYNC_POLL_MAX_INTERVAL, remaining_time) + # We want to sleep at least for `ASYNC_POLL_MIN_INTERVAL`. This is important to ensure that, as we get + # closer to the timeout, we don't accidentally wake up multiple times and hit the server in rapid succession + # due to waking up to early from the `sleep`. + max_sleep_time = max(max_sleep_time, ASYNC_POLL_MIN_INTERVAL) + + time.sleep(min(self.current_sleep_interval, max_sleep_time)) + self.current_sleep_interval *= ASYNC_POLL_BACKOFF_FACTOR \ No newline at end of file diff --git a/tableauserverclient/models/job_item.py b/tableauserverclient/models/job_item.py index 7a3a50861..2a8b6b509 100644 --- a/tableauserverclient/models/job_item.py +++ b/tableauserverclient/models/job_item.py @@ -3,6 +3,16 @@ class JobItem(object): + class FinishCode: + """ + Status codes as documented on + https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref_jobs_tasks_and_schedules.htm#query_job + """ + Success = 0 + Failed = 1 + Cancelled = 2 + + def __init__( self, id_, @@ -89,7 +99,7 @@ def _parse_element(cls, element, ns): created_at = parse_datetime(element.get("createdAt", None)) started_at = parse_datetime(element.get("startedAt", None)) completed_at = parse_datetime(element.get("completedAt", None)) - finish_code = element.get("finishCode", -1) + finish_code = int(element.get("finishCode", -1)) notes = [note.text for note in element.findall(".//t:notes", namespaces=ns)] or None mode = element.get("mode", None) return cls( diff --git a/tableauserverclient/server/endpoint/exceptions.py b/tableauserverclient/server/endpoint/exceptions.py index 9a9a81d77..693817ddc 100644 --- a/tableauserverclient/server/endpoint/exceptions.py +++ b/tableauserverclient/server/endpoint/exceptions.py @@ -64,3 +64,16 @@ def __str__(self): from pprint import pformat return pformat(self.error) + + +class JobFailedException(Exception): + def __init__(self, job): + self.notes = job.notes + self.job = job + + def __str__(self): + return f"Job {self.job.id} failed with notes {self.notes}" + + +class JobCanceledException(JobFailedException): + pass diff --git a/tableauserverclient/server/endpoint/jobs_endpoint.py b/tableauserverclient/server/endpoint/jobs_endpoint.py index 6079ca788..906d4a19e 100644 --- a/tableauserverclient/server/endpoint/jobs_endpoint.py +++ b/tableauserverclient/server/endpoint/jobs_endpoint.py @@ -1,6 +1,8 @@ from .endpoint import Endpoint, api +from .exceptions import JobCanceledException, JobFailedException from .. 
import JobItem, BackgroundJobItem, PaginationItem from ..request_options import RequestOptionsBase +from ...exponential_backoff import ExponentialBackoffTimer import logging @@ -12,7 +14,6 @@ logger = logging.getLogger("tableau.endpoint.jobs") - class Jobs(Endpoint): @property def baseurl(self): @@ -48,3 +49,27 @@ def get_by_id(self, job_id): server_response = self.get_request(url) new_job = JobItem.from_response(server_response.content, self.parent_srv.namespace)[0] return new_job + + def wait_for_job(self, job_id, *, timeout=None): + if isinstance(job_id, JobItem): + job_id = job_id.id + assert isinstance(job_id, str) + logger.debug(f"Waiting for job {job_id}") + + backoffTimer = ExponentialBackoffTimer(timeout=timeout) + job = self.get_by_id(job_id) + while job.completed_at is None: + backoffTimer.sleep() + job = self.get_by_id(job_id) + logger.debug(f"\tJob {job_id} progress={job.progress}") + + logger.info("Job {} Completed: Finish Code: {} - Notes:{}".format(job_id, job.finish_code, job.notes)) + + if job.finish_code == JobItem.FinishCode.Success: + return job + elif job.finish_code == JobItem.FinishCode.Failed: + raise JobFailedException(job) + elif job.finish_code == JobItem.FinishCode.Cancelled: + raise JobCanceledException(job) + else: + raise AssertionError("Unexpected finish_code in job", job) diff --git a/test/_utils.py b/test/_utils.py index ecabf53a4..93d7a9334 100644 --- a/test/_utils.py +++ b/test/_utils.py @@ -1,3 +1,5 @@ +from contextlib import contextmanager +import unittest import os.path TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets') @@ -14,3 +16,19 @@ def read_xml_asset(filename): def read_xml_assets(*args): return map(read_xml_asset, args) + + +@contextmanager +def mocked_time(): + mock_time = 0 + + def sleep_mock(interval): + nonlocal mock_time + mock_time += interval + + def get_time(): + return mock_time + + patch = unittest.mock.patch + with patch("time.sleep", sleep_mock), patch("time.time", get_time): + yield get_time diff --git a/test/test_datasource.py b/test/test_datasource.py index 68d6d1384..4c65e8dc9 100644 --- a/test/test_datasource.py +++ b/test/test_datasource.py @@ -317,7 +317,7 @@ def test_publish_async(self): self.assertEqual('PublishDatasource', new_job.type) self.assertEqual('0', new_job.progress) self.assertEqual('2018-06-30T00:54:54Z', format_datetime(new_job.created_at)) - self.assertEqual('1', new_job.finish_code) + self.assertEqual(1, new_job.finish_code) def test_publish_unnamed_file_object(self): new_datasource = TSC.DatasourceItem('test') diff --git a/test/test_exponential_backoff.py b/test/test_exponential_backoff.py new file mode 100644 index 000000000..57229d4ce --- /dev/null +++ b/test/test_exponential_backoff.py @@ -0,0 +1,62 @@ +import unittest +from ._utils import mocked_time +from tableauserverclient.exponential_backoff import ExponentialBackoffTimer + + +class ExponentialBackoffTests(unittest.TestCase): + def test_exponential(self): + with mocked_time() as mock_time: + exponentialBackoff = ExponentialBackoffTimer() + # The creation of our mock shouldn't sleep + self.assertAlmostEqual(mock_time(), 0) + # The first sleep sleeps for a rather short time, the following sleeps become longer + exponentialBackoff.sleep() + self.assertAlmostEqual(mock_time(), 0.5) + exponentialBackoff.sleep() + self.assertAlmostEqual(mock_time(), 1.2) + exponentialBackoff.sleep() + self.assertAlmostEqual(mock_time(), 2.18) + exponentialBackoff.sleep() + self.assertAlmostEqual(mock_time(), 3.552) + exponentialBackoff.sleep() + 
self.assertAlmostEqual(mock_time(), 5.4728) + + + def test_exponential_saturation(self): + with mocked_time() as mock_time: + exponentialBackoff = ExponentialBackoffTimer() + for _ in range(99): + exponentialBackoff.sleep() + # We don't increase the sleep time above 30 seconds. + # Otherwise, the exponential sleep time could easily + # reach minutes or even hours between polls + for _ in range(5): + s = mock_time() + exponentialBackoff.sleep() + slept = mock_time() - s + self.assertAlmostEqual(slept, 30) + + + def test_timeout(self): + with mocked_time() as mock_time: + exponentialBackoff = ExponentialBackoffTimer(timeout=4.5) + for _ in range(4): + exponentialBackoff.sleep() + self.assertAlmostEqual(mock_time(), 3.552) + # Usually, the following sleep would sleep until 5.5, but due to + # the timeout we wait less; thereby we make sure to take the timeout + # into account as good as possible + exponentialBackoff.sleep() + self.assertAlmostEqual(mock_time(), 4.5) + # The next call to `sleep` will raise a TimeoutError + with self.assertRaises(TimeoutError): + exponentialBackoff.sleep() + + + def test_timeout_zero(self): + with mocked_time() as mock_time: + # The construction of the timer doesn't throw, yet + exponentialBackoff = ExponentialBackoffTimer(timeout = 0) + # But the first `sleep` immediately throws + with self.assertRaises(TimeoutError): + exponentialBackoff.sleep() diff --git a/test/test_job.py b/test/test_job.py index 08b98b815..70bca996c 100644 --- a/test/test_job.py +++ b/test/test_job.py @@ -4,12 +4,16 @@ import requests_mock import tableauserverclient as TSC from tableauserverclient.datetime_helpers import utc -from ._utils import read_xml_asset +from tableauserverclient.server.endpoint.exceptions import JobFailedException +from ._utils import read_xml_asset, mocked_time TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets') GET_XML = 'job_get.xml' GET_BY_ID_XML = 'job_get_by_id.xml' +GET_BY_ID_FAILED_XML = 'job_get_by_id_failed.xml' +GET_BY_ID_CANCELLED_XML = 'job_get_by_id_cancelled.xml' +GET_BY_ID_INPROGRESS_XML = 'job_get_by_id_inprogress.xml' class JobTests(unittest.TestCase): @@ -49,9 +53,6 @@ def test_get_by_id(self): m.get('{0}/{1}'.format(self.baseurl, job_id), text=response_xml) job = self.server.jobs.get_by_id(job_id) - created_at = datetime(2020, 5, 13, 20, 23, 45, tzinfo=utc) - updated_at = datetime(2020, 5, 13, 20, 25, 18, tzinfo=utc) - ended_at = datetime(2020, 5, 13, 20, 25, 18, tzinfo=utc) self.assertEqual(job_id, job.id) self.assertListEqual(job.notes, ['Job detail notes']) @@ -72,3 +73,35 @@ def test_cancel_item(self): with requests_mock.mock() as m: m.put(self.baseurl + '/ee8c6e70-43b6-11e6-af4f-f7b0d8e20760', status_code=204) self.server.jobs.cancel(job) + + + def test_wait_for_job_finished(self): + # Waiting for an already finished job, directly returns that job's info + response_xml = read_xml_asset(GET_BY_ID_XML) + job_id = '2eef4225-aa0c-41c4-8662-a76d89ed7336' + with mocked_time(), requests_mock.mock() as m: + m.get('{0}/{1}'.format(self.baseurl, job_id), text=response_xml) + job = self.server.jobs.wait_for_job(job_id) + + self.assertEqual(job_id, job.id) + self.assertListEqual(job.notes, ['Job detail notes']) + + + def test_wait_for_job_failed(self): + # Waiting for a failed job raises an exception + response_xml = read_xml_asset(GET_BY_ID_FAILED_XML) + job_id = '77d5e57a-2517-479f-9a3c-a32025f2b64d' + with mocked_time(), requests_mock.mock() as m: + m.get('{0}/{1}'.format(self.baseurl, job_id), text=response_xml) + with 
self.assertRaises(JobFailedException): + self.server.jobs.wait_for_job(job_id) + + + def test_wait_for_job_timeout(self): + # Waiting for a job which doesn't terminate will throw an exception + response_xml = read_xml_asset(GET_BY_ID_INPROGRESS_XML) + job_id = '77d5e57a-2517-479f-9a3c-a32025f2b64d' + with mocked_time(), requests_mock.mock() as m: + m.get('{0}/{1}'.format(self.baseurl, job_id), text=response_xml) + with self.assertRaises(TimeoutError): + self.server.jobs.wait_for_job(job_id, timeout=30) diff --git a/test/test_workbook.py b/test/test_workbook.py index d3a3b59b4..459b1f905 100644 --- a/test/test_workbook.py +++ b/test/test_workbook.py @@ -616,7 +616,7 @@ def test_publish_async(self): self.assertEqual('PublishWorkbook', new_job.type) self.assertEqual('0', new_job.progress) self.assertEqual('2018-06-29T23:22:32Z', format_datetime(new_job.created_at)) - self.assertEqual('1', new_job.finish_code) + self.assertEqual(1, new_job.finish_code) def test_publish_invalid_file(self): new_workbook = TSC.WorkbookItem('test', 'ee8c6e70-43b6-11e6-af4f-f7b0d8e20760') From a1c3f94466cba2bde6312fa45000f06221132c3b Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Thu, 7 Oct 2021 01:33:12 +0200 Subject: [PATCH 25/30] Remove `basestring` hack for Python 2.x compatibility TSC only supports Python 3.5+ and Python 2.7 reached end-of-life already on Jan 1st, 2020. Let's remove that hack from our code, and move on... --- tableauserverclient/models/property_decorators.py | 8 +------- tableauserverclient/server/endpoint/jobs_endpoint.py | 8 +------- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/tableauserverclient/models/property_decorators.py b/tableauserverclient/models/property_decorators.py index b3466dea7..ea2a62380 100644 --- a/tableauserverclient/models/property_decorators.py +++ b/tableauserverclient/models/property_decorators.py @@ -3,12 +3,6 @@ from functools import wraps from ..datetime_helpers import parse_datetime -try: - basestring -except NameError: - # In case we are in python 3 the string check is different - basestring = str - def property_is_enum(enum_type): def property_type_decorator(func): @@ -134,7 +128,7 @@ def property_is_datetime(func): def wrapper(self, value): if isinstance(value, datetime.datetime): return func(self, value) - if not isinstance(value, basestring): + if not isinstance(value, str): raise ValueError( "Cannot convert {} into a datetime, cannot update {}".format(value.__class__.__name__, func.__name__) ) diff --git a/tableauserverclient/server/endpoint/jobs_endpoint.py b/tableauserverclient/server/endpoint/jobs_endpoint.py index 906d4a19e..4c975c523 100644 --- a/tableauserverclient/server/endpoint/jobs_endpoint.py +++ b/tableauserverclient/server/endpoint/jobs_endpoint.py @@ -6,12 +6,6 @@ import logging -try: - basestring -except NameError: - # In case we are in python 3 the string check is different - basestring = str - logger = logging.getLogger("tableau.endpoint.jobs") class Jobs(Endpoint): @@ -22,7 +16,7 @@ def baseurl(self): @api(version="2.6") def get(self, job_id=None, req_options=None): # Backwards Compatibility fix until we rev the major version - if job_id is not None and isinstance(job_id, basestring): + if job_id is not None and isinstance(job_id, str): import warnings warnings.warn("Jobs.get(job_id) is deprecated, update code to use Jobs.get_by_id(job_id)") From acda7f57d24c505da7da97e9be98dc3b427eda61 Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Thu, 7 Oct 2021 13:54:52 +0200 Subject: [PATCH 26/30] Add missing test 
assets Should have been part of #903, but I forgot to `git add` them :/ --- test/assets/job_get_by_id_failed.xml | 9 +++++++++ test/assets/job_get_by_id_inprogress.xml | 9 +++++++++ 2 files changed, 18 insertions(+) create mode 100644 test/assets/job_get_by_id_failed.xml create mode 100644 test/assets/job_get_by_id_inprogress.xml diff --git a/test/assets/job_get_by_id_failed.xml b/test/assets/job_get_by_id_failed.xml new file mode 100644 index 000000000..c7456008e --- /dev/null +++ b/test/assets/job_get_by_id_failed.xml @@ -0,0 +1,9 @@ + + + + + + c569ee62-9204-416f-843d-5ccfebc0231b + + + \ No newline at end of file diff --git a/test/assets/job_get_by_id_inprogress.xml b/test/assets/job_get_by_id_inprogress.xml new file mode 100644 index 000000000..7a23fb99d --- /dev/null +++ b/test/assets/job_get_by_id_inprogress.xml @@ -0,0 +1,9 @@ + + + + + + c569ee62-9204-416f-843d-5ccfebc0231b + + + \ No newline at end of file From 168638c2d04deef07bc5a82794276b0b18994b5e Mon Sep 17 00:00:00 2001 From: Adrian Vogelsgesang Date: Thu, 7 Oct 2021 14:05:40 +0200 Subject: [PATCH 27/30] Stop supporting Python 3.5 Python 3.5 is already end-of-life and no longer receives security patches for over a year now. Also, I recently added an f-string because all Python versions which are still in support, also support f-strings. I was unpleasantly surprised when I had to realize that we still claim to support Python 3.5 which doesn't have f-strings... --- .github/workflows/run-tests.yml | 2 +- README.md | 2 +- samples/add_default_permission.py | 2 +- samples/create_group.py | 2 +- samples/create_project.py | 2 +- samples/create_schedules.py | 2 +- samples/download_view_image.py | 2 +- samples/export.py | 2 +- samples/export_wb.py | 2 +- samples/filter_sort_groups.py | 2 +- samples/filter_sort_projects.py | 2 +- samples/kill_all_jobs.py | 2 +- samples/list.py | 2 +- samples/login.py | 2 +- samples/metadata_query.py | 2 +- samples/move_workbook_projects.py | 2 +- samples/move_workbook_sites.py | 2 +- samples/publish_datasource.py | 2 +- samples/publish_workbook.py | 2 +- samples/query_permissions.py | 2 +- samples/refresh.py | 2 +- samples/refresh_tasks.py | 2 +- samples/set_http_options.py | 2 +- samples/set_refresh_schedule.py | 2 +- samples/update_connection.py | 2 +- 25 files changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 9a51ac7a9..61476132f 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -8,7 +8,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.5, 3.6, 3.7, 3.8, 3.9, 3.10.0-rc.2] + python-version: [3.6, 3.7, 3.8, 3.9, 3.10.0-rc.2] runs-on: ${{ matrix.os }} diff --git a/README.md b/README.md index a5445e052..b454dd4c7 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Use the Tableau Server Client (TSC) library to increase your productivity as you * Create users and groups. * Query projects, sites, and more. -This repository contains Python source code and sample files. Python versions 3.5 and up are supported. +This repository contains Python source code and sample files. Python versions 3.6 and up are supported. 
For more information on installing and using TSC, see the documentation: diff --git a/samples/add_default_permission.py b/samples/add_default_permission.py index 77ad58a11..8018c7b30 100644 --- a/samples/add_default_permission.py +++ b/samples/add_default_permission.py @@ -1,6 +1,6 @@ #### # This script demonstrates how to add default permissions using TSC -# To run the script, you must have installed Python 3.5 and later. +# To run the script, you must have installed Python 3.6 or later. # # In order to demonstrate adding a new default permission, this sample will create # a new project and add a new capability to the new project, for the default "All users" group. diff --git a/samples/create_group.py b/samples/create_group.py index 4459eb96a..ad0e6cc4f 100644 --- a/samples/create_group.py +++ b/samples/create_group.py @@ -2,7 +2,7 @@ # This script demonstrates how to create a group using the Tableau # Server Client. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### diff --git a/samples/create_project.py b/samples/create_project.py index b3b28c2dc..814d35617 100644 --- a/samples/create_project.py +++ b/samples/create_project.py @@ -4,7 +4,7 @@ # parent_id. # # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/create_schedules.py b/samples/create_schedules.py index 3c2627bf6..39332713b 100644 --- a/samples/create_schedules.py +++ b/samples/create_schedules.py @@ -2,7 +2,7 @@ # This script demonstrates how to create schedules using the Tableau # Server Client. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### diff --git a/samples/download_view_image.py b/samples/download_view_image.py index 17cc2000b..3ac2ed4d5 100644 --- a/samples/download_view_image.py +++ b/samples/download_view_image.py @@ -5,7 +5,7 @@ # For more information, refer to the documentations on 'Query View Image' # (https://onlinehelp.tableau.com/current/api/rest_api/en-us/help.htm) # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/export.py b/samples/export.py index 2b6de57f9..6317ec53b 100644 --- a/samples/export.py +++ b/samples/export.py @@ -2,7 +2,7 @@ # This script demonstrates how to export a view using the Tableau # Server Client. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/export_wb.py b/samples/export_wb.py index a9b4d60be..2be476130 100644 --- a/samples/export_wb.py +++ b/samples/export_wb.py @@ -4,7 +4,7 @@ # # You will need to do `pip install PyPDF2` to use this sample. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### diff --git a/samples/filter_sort_groups.py b/samples/filter_sort_groups.py index 7f160f66d..24dee791d 100644 --- a/samples/filter_sort_groups.py +++ b/samples/filter_sort_groups.py @@ -2,7 +2,7 @@ # This script demonstrates how to filter and sort groups using the Tableau # Server Client. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. 
#### diff --git a/samples/filter_sort_projects.py b/samples/filter_sort_projects.py index e4f695fda..23b350fa6 100644 --- a/samples/filter_sort_projects.py +++ b/samples/filter_sort_projects.py @@ -2,7 +2,7 @@ # This script demonstrates how to use the Tableau Server Client # to filter and sort on the name of the projects present on site. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/kill_all_jobs.py b/samples/kill_all_jobs.py index f9fa173e5..196da4b01 100644 --- a/samples/kill_all_jobs.py +++ b/samples/kill_all_jobs.py @@ -1,7 +1,7 @@ #### # This script demonstrates how to kill all of the running jobs # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/list.py b/samples/list.py index 8a6407e0d..867757668 100644 --- a/samples/list.py +++ b/samples/list.py @@ -1,7 +1,7 @@ #### # This script demonstrates how to list all of the workbooks or datasources # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/login.py b/samples/login.py index eec967e8d..c8af97505 100644 --- a/samples/login.py +++ b/samples/login.py @@ -1,7 +1,7 @@ #### # This script demonstrates how to log in to Tableau Server Client. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/metadata_query.py b/samples/metadata_query.py index 7cd321f0a..c9cf7394c 100644 --- a/samples/metadata_query.py +++ b/samples/metadata_query.py @@ -1,7 +1,7 @@ #### # This script demonstrates how to use the metadata API to query information on a published data source # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/move_workbook_projects.py b/samples/move_workbook_projects.py index 62189370c..c8227aeda 100644 --- a/samples/move_workbook_projects.py +++ b/samples/move_workbook_projects.py @@ -4,7 +4,7 @@ # a workbook that matches a given name and update it to be in # the desired project. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/move_workbook_sites.py b/samples/move_workbook_sites.py index 8a97031a9..e0475ac06 100644 --- a/samples/move_workbook_sites.py +++ b/samples/move_workbook_sites.py @@ -4,7 +4,7 @@ # a workbook that matches a given name, download the workbook, # and then publish it to the destination site. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/publish_datasource.py b/samples/publish_datasource.py index 0d7f936c2..8ae744185 100644 --- a/samples/publish_datasource.py +++ b/samples/publish_datasource.py @@ -15,7 +15,7 @@ # more information on personal access tokens, refer to the documentations: # (https://help.tableau.com/current/server/en-us/security_personal_access_tokens.htm) # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. 
#### import argparse diff --git a/samples/publish_workbook.py b/samples/publish_workbook.py index 58a158b12..fcfcddc15 100644 --- a/samples/publish_workbook.py +++ b/samples/publish_workbook.py @@ -11,7 +11,7 @@ # For more information, refer to the documentations on 'Publish Workbook' # (https://onlinehelp.tableau.com/current/api/rest_api/en-us/help.htm) # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/query_permissions.py b/samples/query_permissions.py index 457d534ec..0909f915d 100644 --- a/samples/query_permissions.py +++ b/samples/query_permissions.py @@ -1,6 +1,6 @@ #### # This script demonstrates how to query for permissions using TSC -# To run the script, you must have installed Python 3.5 and later. +# To run the script, you must have installed Python 3.6 or later. # # Example usage: 'python query_permissions.py -s https://10ax.online.tableau.com --site # devSite123 -u tabby@tableau.com workbook b4065286-80f0-11ea-af1b-cb7191f48e45' diff --git a/samples/refresh.py b/samples/refresh.py index 7b2618b6e..3eed5b4be 100644 --- a/samples/refresh.py +++ b/samples/refresh.py @@ -1,7 +1,7 @@ #### # This script demonstrates how to use trigger a refresh on a datasource or workbook # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/refresh_tasks.py b/samples/refresh_tasks.py index 01f574ee4..bf69d064a 100644 --- a/samples/refresh_tasks.py +++ b/samples/refresh_tasks.py @@ -2,7 +2,7 @@ # This script demonstrates how to use the Tableau Server Client # to query extract refresh tasks and run them as needed. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/set_http_options.py b/samples/set_http_options.py index 8fad2a10c..40ed9167e 100644 --- a/samples/set_http_options.py +++ b/samples/set_http_options.py @@ -2,7 +2,7 @@ # This script demonstrates how to set http options. It will set the option # to not verify SSL certificate, and query all workbooks on site. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### import argparse diff --git a/samples/set_refresh_schedule.py b/samples/set_refresh_schedule.py index 37526ccc8..862ea2372 100644 --- a/samples/set_refresh_schedule.py +++ b/samples/set_refresh_schedule.py @@ -2,7 +2,7 @@ # This script demonstrates how to set the refresh schedule for # a workbook or datasource. # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. #### diff --git a/samples/update_connection.py b/samples/update_connection.py index 7ac67fd76..0e87217e8 100644 --- a/samples/update_connection.py +++ b/samples/update_connection.py @@ -1,7 +1,7 @@ #### # This script demonstrates how to update a connections credentials on a server to embed the credentials # -# To run the script, you must have installed Python 3.5 or later. +# To run the script, you must have installed Python 3.6 or later. 
#### import argparse From 55dd640100c2d64b734815f12c3e14f7f20d18c1 Mon Sep 17 00:00:00 2001 From: Jac Fitzgerald Date: Sat, 9 Oct 2021 00:35:51 -0700 Subject: [PATCH 28/30] un-re-over-merge --- tableauserverclient/models/datasource_item.py | 5 ++++- test/assets/datasource_get_by_id.xml | 2 +- test/test_datasource.py | 1 - 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/tableauserverclient/models/datasource_item.py b/tableauserverclient/models/datasource_item.py index 6c449408c..5b23341d0 100644 --- a/tableauserverclient/models/datasource_item.py +++ b/tableauserverclient/models/datasource_item.py @@ -136,6 +136,10 @@ def datasource_type(self): def description(self): return self._description + @description.setter + def description(self, value): + self._description = value + @property def updated_at(self): return self._updated_at @@ -174,7 +178,6 @@ def _parse_common_elements(self, datasource_xml, ns): _, _, _, - _, encrypt_extracts, has_extracts, _, diff --git a/test/assets/datasource_get_by_id.xml b/test/assets/datasource_get_by_id.xml index d5dcf89ee..53434b8cc 100644 --- a/test/assets/datasource_get_by_id.xml +++ b/test/assets/datasource_get_by_id.xml @@ -1,6 +1,6 @@ - + diff --git a/test/test_datasource.py b/test/test_datasource.py index ae2f85c23..52a5eabe3 100644 --- a/test/test_datasource.py +++ b/test/test_datasource.py @@ -105,7 +105,6 @@ def test_get_by_id(self): self.assertEqual('ee8c6e70-43b6-11e6-af4f-f7b0d8e20760', single_datasource.project_id) self.assertEqual('5de011f8-5aa9-4d5b-b991-f462c8dd6bb7', single_datasource.owner_id) self.assertEqual(set(['world', 'indicators', 'sample']), single_datasource.tags) - self.assertEqual("test-ds", single_datasource.description) self.assertEqual(TSC.DatasourceItem.AskDataEnablement.SiteDefault, single_datasource.ask_data_enablement) def test_update(self): From 428eb550dbeee9aea0e7941f67ceb37819c439fa Mon Sep 17 00:00:00 2001 From: jorwoods Date: Tue, 19 Oct 2021 12:32:51 -0500 Subject: [PATCH 29/30] Add FlowRun Item and Endpoints. 
(#884) * Add tests for fetching flow runs * Implement basics of FlowRuns * Add tests for cancel flow run * Make FlowRuns a Queryset endpoint for easier filtering * Add test for flow refresh endpoint * Align to naming conventions * Apply name change consistently * Change flowrun_id into flow_run_id * Add wait_for_job to FlowRun * Tag wait_for_job with version number * Rewrite flow_run to use ExponentialBackoffTimer * Test flow run wait with backoff * Remove 3.5 from test matrix * Standardize spelling of cancelled Co-authored-by: Jordan Woods --- tableauserverclient/__init__.py | 1 + tableauserverclient/models/__init__.py | 1 + tableauserverclient/models/flow_run_item.py | 106 ++++++++++++++++++ tableauserverclient/models/job_item.py | 19 ++++ tableauserverclient/server/__init__.py | 1 + .../server/endpoint/__init__.py | 1 + .../server/endpoint/exceptions.py | 13 ++- .../server/endpoint/flow_runs_endpoint.py | 76 +++++++++++++ .../server/endpoint/jobs_endpoint.py | 5 +- tableauserverclient/server/server.py | 2 + test/_utils.py | 5 +- test/assets/flow_refresh.xml | 11 ++ test/assets/flow_runs_get.xml | 19 ++++ test/assets/flow_runs_get_by_id.xml | 10 ++ test/assets/flow_runs_get_by_id_failed.xml | 10 ++ .../assets/flow_runs_get_by_id_inprogress.xml | 10 ++ test/test_flow.py | 20 ++++ test/test_flowruns.py | 104 +++++++++++++++++ 18 files changed, 410 insertions(+), 4 deletions(-) create mode 100644 tableauserverclient/models/flow_run_item.py create mode 100644 tableauserverclient/server/endpoint/flow_runs_endpoint.py create mode 100644 test/assets/flow_refresh.xml create mode 100644 test/assets/flow_runs_get.xml create mode 100644 test/assets/flow_runs_get_by_id.xml create mode 100644 test/assets/flow_runs_get_by_id_failed.xml create mode 100644 test/assets/flow_runs_get_by_id_inprogress.xml create mode 100644 test/test_flowruns.py diff --git a/tableauserverclient/__init__.py b/tableauserverclient/__init__.py index fcce4e0c7..2ad65d71e 100644 --- a/tableauserverclient/__init__.py +++ b/tableauserverclient/__init__.py @@ -34,6 +34,7 @@ FlowItem, WebhookItem, PersonalAccessTokenAuth, + FlowRunItem ) from .server import ( RequestOptions, diff --git a/tableauserverclient/models/__init__.py b/tableauserverclient/models/__init__.py index c0ddc2e75..e5945782d 100644 --- a/tableauserverclient/models/__init__.py +++ b/tableauserverclient/models/__init__.py @@ -10,6 +10,7 @@ from .favorites_item import FavoriteItem from .group_item import GroupItem from .flow_item import FlowItem +from .flow_run_item import FlowRunItem from .interval_item import ( IntervalItem, DailyInterval, diff --git a/tableauserverclient/models/flow_run_item.py b/tableauserverclient/models/flow_run_item.py new file mode 100644 index 000000000..251c667b1 --- /dev/null +++ b/tableauserverclient/models/flow_run_item.py @@ -0,0 +1,106 @@ +import xml.etree.ElementTree as ET +from ..datetime_helpers import parse_datetime +import itertools + + +class FlowRunItem(object): + def __init__(self) -> None: + self._id=None + self._flow_id=None + self._status=None + self._started_at=None + self._completed_at=None + self._progress=None + self._background_job_id=None + + + @property + def id(self): + return self._id + + + @property + def flow_id(self): + return self._flow_id + + + @property + def status(self): + return self._status + + + @property + def started_at(self): + return self._started_at + + + @property + def completed_at(self): + return self._completed_at + + + @property + def progress(self): + return self._progress + + + @property + def 
background_job_id(self): + return self._background_job_id + + + def _set_values( + self, + id, + flow_id, + status, + started_at, + completed_at, + progress, + background_job_id, + ): + if id is not None: + self._id = id + if flow_id is not None: + self._flow_id = flow_id + if status is not None: + self._status = status + if started_at is not None: + self._started_at = started_at + if completed_at is not None: + self._completed_at = completed_at + if progress is not None: + self._progress = progress + if background_job_id is not None: + self._background_job_id = background_job_id + + + @classmethod + def from_response(cls, resp, ns): + all_flowrun_items = list() + parsed_response = ET.fromstring(resp) + all_flowrun_xml = itertools.chain( + parsed_response.findall(".//t:flowRun[@id]", namespaces=ns), + parsed_response.findall(".//t:flowRuns[@id]", namespaces=ns) + ) + + for flowrun_xml in all_flowrun_xml: + parsed = cls._parse_element(flowrun_xml, ns) + flowrun_item = cls() + flowrun_item._set_values(**parsed) + all_flowrun_items.append(flowrun_item) + return all_flowrun_items + + + @staticmethod + def _parse_element(flowrun_xml, ns): + result = {} + result['id'] = flowrun_xml.get("id", None) + result['flow_id'] = flowrun_xml.get("flowId", None) + result['status'] = flowrun_xml.get("status", None) + result['started_at'] = parse_datetime(flowrun_xml.get("startedAt", None)) + result['completed_at'] = parse_datetime(flowrun_xml.get("completedAt", None)) + result['progress'] = flowrun_xml.get("progress", None) + result['background_job_id'] = flowrun_xml.get("backgroundJobId", None) + + return result diff --git a/tableauserverclient/models/job_item.py b/tableauserverclient/models/job_item.py index 2a8b6b509..8c21b24e6 100644 --- a/tableauserverclient/models/job_item.py +++ b/tableauserverclient/models/job_item.py @@ -1,4 +1,5 @@ import xml.etree.ElementTree as ET +from .flow_run_item import FlowRunItem from ..datetime_helpers import parse_datetime @@ -24,6 +25,7 @@ def __init__( finish_code=0, notes=None, mode=None, + flow_run=None, ): self._id = id_ self._type = job_type @@ -34,6 +36,7 @@ def __init__( self._finish_code = finish_code self._notes = notes or [] self._mode = mode + self._flow_run = flow_run @property def id(self): @@ -76,6 +79,14 @@ def mode(self, value): # check for valid data here self._mode = value + @property + def flow_run(self): + return self._flow_run + + @flow_run.setter + def flow_run(self, value): + self._flow_run = value + def __repr__(self): return ( " + + + + + + + \ No newline at end of file diff --git a/test/assets/flow_runs_get.xml b/test/assets/flow_runs_get.xml new file mode 100644 index 000000000..bdce4cdfb --- /dev/null +++ b/test/assets/flow_runs_get.xml @@ -0,0 +1,19 @@ + + + + + + + \ No newline at end of file diff --git a/test/assets/flow_runs_get_by_id.xml b/test/assets/flow_runs_get_by_id.xml new file mode 100644 index 000000000..3a768fab4 --- /dev/null +++ b/test/assets/flow_runs_get_by_id.xml @@ -0,0 +1,10 @@ + + + + \ No newline at end of file diff --git a/test/assets/flow_runs_get_by_id_failed.xml b/test/assets/flow_runs_get_by_id_failed.xml new file mode 100644 index 000000000..9e766680b --- /dev/null +++ b/test/assets/flow_runs_get_by_id_failed.xml @@ -0,0 +1,10 @@ + + + + \ No newline at end of file diff --git a/test/assets/flow_runs_get_by_id_inprogress.xml b/test/assets/flow_runs_get_by_id_inprogress.xml new file mode 100644 index 000000000..42e1a77f9 --- /dev/null +++ b/test/assets/flow_runs_get_by_id_inprogress.xml @@ -0,0 +1,10 @@ + + + + \ No 
newline at end of file diff --git a/test/test_flow.py b/test/test_flow.py index f5c057c30..545623d03 100644 --- a/test/test_flow.py +++ b/test/test_flow.py @@ -12,6 +12,7 @@ POPULATE_CONNECTIONS_XML = 'flow_populate_connections.xml' POPULATE_PERMISSIONS_XML = 'flow_populate_permissions.xml' UPDATE_XML = 'flow_update.xml' +REFRESH_XML = 'flow_refresh.xml' class FlowTests(unittest.TestCase): @@ -113,3 +114,22 @@ def test_populate_permissions(self): TSC.Permission.Capability.Write: TSC.Permission.Mode.Allow, TSC.Permission.Capability.Read: TSC.Permission.Mode.Allow, }) + + def test_refresh(self): + with open(asset(REFRESH_XML), 'rb') as f: + response_xml = f.read().decode('utf-8') + with requests_mock.mock() as m: + m.post(self.baseurl + '/92967d2d-c7e2-46d0-8847-4802df58f484/run', text=response_xml) + flow_item = TSC.FlowItem('test') + flow_item._id = '92967d2d-c7e2-46d0-8847-4802df58f484' + refresh_job = self.server.flows.refresh(flow_item) + + self.assertEqual(refresh_job.id, 'd1b2ccd0-6dfa-444a-aee4-723dbd6b7c9d') + self.assertEqual(refresh_job.mode, 'Asynchronous') + self.assertEqual(refresh_job.type, 'RunFlow') + self.assertEqual(format_datetime(refresh_job.created_at), '2018-05-22T13:00:29Z') + self.assertIsInstance(refresh_job.flow_run, TSC.FlowRunItem) + self.assertEqual(refresh_job.flow_run.id, 'e0c3067f-2333-4eee-8028-e0a56ca496f6') + self.assertEqual(refresh_job.flow_run.flow_id, '92967d2d-c7e2-46d0-8847-4802df58f484') + self.assertEqual(format_datetime(refresh_job.flow_run.started_at), '2018-05-22T13:00:29Z') + diff --git a/test/test_flowruns.py b/test/test_flowruns.py new file mode 100644 index 000000000..d2e72f31a --- /dev/null +++ b/test/test_flowruns.py @@ -0,0 +1,104 @@ +import unittest +import os +import requests_mock +import xml.etree.ElementTree as ET +import tableauserverclient as TSC +from tableauserverclient.datetime_helpers import format_datetime +from tableauserverclient.server.endpoint.exceptions import FlowRunFailedException +from tableauserverclient.server.request_factory import RequestFactory +from ._utils import read_xml_asset, mocked_time + +GET_XML = 'flow_runs_get.xml' +GET_BY_ID_XML = 'flow_runs_get_by_id.xml' +GET_BY_ID_FAILED_XML = 'flow_runs_get_by_id_failed.xml' +GET_BY_ID_INPROGRESS_XML = 'flow_runs_get_by_id_inprogress.xml' + + +class FlowRunTests(unittest.TestCase): + def setUp(self): + self.server = TSC.Server('http://test') + + # Fake signin + self.server._site_id = 'dad65087-b08b-4603-af4e-2887b8aafc67' + self.server._auth_token = 'j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM' + self.server.version = "3.10" + + self.baseurl = self.server.flow_runs.baseurl + + def test_get(self): + response_xml = read_xml_asset(GET_XML) + with requests_mock.mock() as m: + m.get(self.baseurl, text=response_xml) + all_flow_runs, pagination_item = self.server.flow_runs.get() + + self.assertEqual(2, pagination_item.total_available) + self.assertEqual('cc2e652d-4a9b-4476-8c93-b238c45db968', all_flow_runs[0].id) + self.assertEqual('2021-02-11T01:42:55Z', format_datetime(all_flow_runs[0].started_at)) + self.assertEqual('2021-02-11T01:57:38Z', format_datetime(all_flow_runs[0].completed_at)) + self.assertEqual('Success', all_flow_runs[0].status) + self.assertEqual('100', all_flow_runs[0].progress) + self.assertEqual('aa23f4ac-906f-11e9-86fb-3f0f71412e77', all_flow_runs[0].background_job_id) + + self.assertEqual('a3104526-c0c6-4ea5-8362-e03fc7cbd7ee', all_flow_runs[1].id) + self.assertEqual('2021-02-13T04:05:30Z', format_datetime(all_flow_runs[1].started_at)) + 
self.assertEqual('2021-02-13T04:05:35Z', format_datetime(all_flow_runs[1].completed_at)) + self.assertEqual('Failed', all_flow_runs[1].status) + self.assertEqual('100', all_flow_runs[1].progress) + self.assertEqual('1ad21a9d-2530-4fbf-9064-efd3c736e023', all_flow_runs[1].background_job_id) + + def test_get_by_id(self): + response_xml = read_xml_asset(GET_BY_ID_XML) + with requests_mock.mock() as m: + m.get(self.baseurl + "/cc2e652d-4a9b-4476-8c93-b238c45db968", text=response_xml) + flow_run = self.server.flow_runs.get_by_id("cc2e652d-4a9b-4476-8c93-b238c45db968") + + self.assertEqual('cc2e652d-4a9b-4476-8c93-b238c45db968', flow_run.id) + self.assertEqual('2021-02-11T01:42:55Z', format_datetime(flow_run.started_at)) + self.assertEqual('2021-02-11T01:57:38Z', format_datetime(flow_run.completed_at)) + self.assertEqual('Success', flow_run.status) + self.assertEqual('100', flow_run.progress) + self.assertEqual('1ad21a9d-2530-4fbf-9064-efd3c736e023', flow_run.background_job_id) + + def test_cancel_id(self): + with requests_mock.mock() as m: + m.put(self.baseurl + '/ee8c6e70-43b6-11e6-af4f-f7b0d8e20760', status_code=204) + self.server.flow_runs.cancel('ee8c6e70-43b6-11e6-af4f-f7b0d8e20760') + + def test_cancel_item(self): + run = TSC.FlowRunItem() + run._id = 'ee8c6e70-43b6-11e6-af4f-f7b0d8e20760' + with requests_mock.mock() as m: + m.put(self.baseurl + '/ee8c6e70-43b6-11e6-af4f-f7b0d8e20760', status_code=204) + self.server.flow_runs.cancel(run) + + + def test_wait_for_job_finished(self): + # Waiting for an already finished job, directly returns that job's info + response_xml = read_xml_asset(GET_BY_ID_XML) + flow_run_id = 'cc2e652d-4a9b-4476-8c93-b238c45db968' + with mocked_time(), requests_mock.mock() as m: + m.get('{0}/{1}'.format(self.baseurl, flow_run_id), text=response_xml) + flow_run = self.server.flow_runs.wait_for_job(flow_run_id) + + self.assertEqual(flow_run_id, flow_run.id) + self.assertEqual(flow_run.progress, "100") + + + def test_wait_for_job_failed(self): + # Waiting for a failed job raises an exception + response_xml = read_xml_asset(GET_BY_ID_FAILED_XML) + flow_run_id = 'c2b35d5a-e130-471a-aec8-7bc5435fe0e7' + with mocked_time(), requests_mock.mock() as m: + m.get('{0}/{1}'.format(self.baseurl, flow_run_id), text=response_xml) + with self.assertRaises(FlowRunFailedException): + self.server.flow_runs.wait_for_job(flow_run_id) + + + def test_wait_for_job_timeout(self): + # Waiting for a job which doesn't terminate will throw an exception + response_xml = read_xml_asset(GET_BY_ID_INPROGRESS_XML) + flow_run_id = '71afc22c-9c06-40be-8d0f-4c4166d29e6c' + with mocked_time(), requests_mock.mock() as m: + m.get('{0}/{1}'.format(self.baseurl, flow_run_id), text=response_xml) + with self.assertRaises(TimeoutError): + self.server.flow_runs.wait_for_job(flow_run_id, timeout=30) From 46bbe2ede00cc0ae1de383def7e7ca653b36c158 Mon Sep 17 00:00:00 2001 From: mmuttreja-tableau <87720143+mmuttreja-tableau@users.noreply.github.com> Date: Wed, 20 Oct 2021 17:26:45 -0400 Subject: [PATCH 30/30] Update contributors and Changelog for Release 0.17 (#920) * Update CONTRIBUTORS.md & changelog for v 0.17 Update contributors & changelog for v 0.17 --- CHANGELOG.md | 8 ++++++++ CONTRIBUTORS.md | 1 + 2 files changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c4c9197f5..e375f8385 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +## 0.17.0 (20 October 2021) +Update publish.sh to use python3 (#866) +Fixed jobs.get_by_id(job_id) example & reference docs (#867, #868) +Fixed handling 
for workbooks in personal spaces which do not have projectID or Name (#875) +Updated links to Data Source Methods page in REST API docs (#879) +Upgraded to newer Slack action provider (#880) +Added support to the package for getting flow run status, as well as the ability to cancel flow runs. (#884) + ## 0.16.0 (15 July 2021) * Documentation updates (#800, #818, #839, #842) * Fixed data alert repr in subscription item (#821) diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index 74b20d93d..89b8d213c 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -59,3 +59,4 @@ The following people have contributed to this project to make it possible, and w * [Dan Zucker](https://github.com/dzucker-tab) * [Brian Cantoni](https://github.com/bcantoni) * [Ovini Nanayakkara](https://github.com/ovinis) +* [Manish Muttreja](https://github.com/mmuttreja-tableau)
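
As a quick illustration of the API surface added in this series, here is a minimal, hypothetical usage sketch (not part of the patch itself): it assumes a reachable server at 'https://example-server', placeholder credentials and site, and a placeholder flow id; the flow-run endpoint, JobItem.flow_run, and exception names mirror the tests above, while the sign-in and flows.get_by_id calls use the pre-existing library API.

    import tableauserverclient as TSC
    from tableauserverclient.server.endpoint.exceptions import FlowRunFailedException

    # Placeholder address, credentials, and site; flow runs require REST API 3.10+.
    server = TSC.Server('https://example-server')
    server.version = '3.10'
    auth = TSC.TableauAuth('USERNAME', 'PASSWORD', site_id='SITE')

    with server.auth.sign_in(auth):
        # List recent flow runs with their status and progress.
        all_flow_runs, pagination_item = server.flow_runs.get()
        for run in all_flow_runs:
            print(run.id, run.status, run.progress, run.background_job_id)

        # Start a flow (placeholder id) and block until the resulting run finishes.
        flow = server.flows.get_by_id('92967d2d-c7e2-46d0-8847-4802df58f484')
        job = server.flows.refresh(flow)
        try:
            finished = server.flow_runs.wait_for_job(job.flow_run.id, timeout=600)
            print('Flow run completed at', finished.completed_at)
        except FlowRunFailedException:
            # A failed run raises; an in-progress run can be cancelled by id or by item.
            server.flow_runs.cancel(job.flow_run.id)

wait_for_job polls get_by_id until the run's progress reaches 100, raising FlowRunFailedException for a failed run and TimeoutError if the optional timeout (in seconds) elapses first, so callers that only need fire-and-forget behavior can stop after flows.refresh and skip the wait entirely.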