From 542378920c8191bcf03e08a15e5cf09b928e56cb Mon Sep 17 00:00:00 2001
From: prijendev
Date: Fri, 8 Oct 2021 14:29:48 +0530
Subject: [PATCH 1/5] Added code comments

---
 tap_zendesk/__init__.py     | 15 +++++++++++++++
 tap_zendesk/discover.py     |  2 ++
 tap_zendesk/http.py         |  7 +++++++
 tap_zendesk/streams.py      | 40 ++++++++++++++++++++++++++++++++++++++++
 test/unittests/test_http.py |  6 ++++++
 test/unittests/test_init.py |  5 +++++
 6 files changed, 75 insertions(+)

diff --git a/tap_zendesk/__init__.py b/tap_zendesk/__init__.py
index 6982a0c..d8102c2 100755
--- a/tap_zendesk/__init__.py
+++ b/tap_zendesk/__init__.py
@@ -46,15 +46,18 @@ def request_metrics_patch(self, method, url, **kwargs):
 Session.request = request_metrics_patch
 # end patch
 
+# Discover schemas for all streams and dump the catalog to STDOUT
 def do_discover(client):
     LOGGER.info("Starting discover")
     catalog = {"streams": discover_streams(client)}
     json.dump(catalog, sys.stdout, indent=2)
     LOGGER.info("Finished discover")
 
+# Check whether the stream is selected in its metadata
 def stream_is_selected(mdata):
     return mdata.get((), {}).get('selected', False)
 
+# Return a list of all the streams selected in the catalog
 def get_selected_streams(catalog):
     selected_stream_names = []
     for stream in catalog.streams:
@@ -68,6 +71,7 @@ def get_selected_streams(catalog):
     'tickets': ['ticket_audits', 'ticket_metrics', 'ticket_comments']
 }
 
+# Return a list of all the sub-stream names
 def get_sub_stream_names():
     sub_stream_names = []
     for parent_stream in SUB_STREAMS:
@@ -77,6 +81,7 @@ def get_sub_stream_names():
 class DependencyException(Exception):
     pass
 
+# Raise an exception if a sub-stream is selected but its parent stream is not
 def validate_dependencies(selected_stream_ids):
     errs = []
     msg_tmpl = ("Unable to extract {0} data. "
@@ -90,11 +95,13 @@ def validate_dependencies(selected_stream_ids):
     if errs:
         raise DependencyException(" ".join(errs))
 
+# Populate class schemas for all the streams selected in the catalog
 def populate_class_schemas(catalog, selected_stream_names):
     for stream in catalog.streams:
         if stream.tap_stream_id in selected_stream_names:
             STREAMS[stream.tap_stream_id].stream = stream
 
+# Run sync mode
 def do_sync(client, catalog, state, config):
 
     selected_stream_names = get_selected_streams(catalog)
@@ -102,9 +109,11 @@ def do_sync(client, catalog, state, config):
     populate_class_schemas(catalog, selected_stream_names)
     all_sub_stream_names = get_sub_stream_names()
 
+    # Loop over the streams in the catalog
     for stream in catalog.streams:
         stream_name = stream.tap_stream_id
         mdata = metadata.to_map(stream.metadata)
+        #If stream does not selected then skip it.
         if stream_name not in selected_stream_names:
             LOGGER.info("%s: Skipping - not selected", stream_name)
             continue
@@ -120,11 +129,14 @@ def do_sync(client, catalog, state, config):
 
         LOGGER.info("%s: Starting", stream_name)
 
+        # Write the schema of the stream to STDOUT
         key_properties = metadata.get(mdata, (), 'table-key-properties')
         singer.write_schema(stream_name, stream.schema.to_dict(), key_properties)
 
         sub_stream_names = SUB_STREAMS.get(stream_name)
+        # Populate class schemas and write a schema for the selected sub-streams of the stream
         if sub_stream_names:
+            # Loop over the sub-streams of the current stream
             for sub_stream_name in sub_stream_names:
                 if sub_stream_name not in selected_stream_names:
                     continue
@@ -148,6 +160,7 @@ def do_sync(client, catalog, state, config):
     LOGGER.info("Finished sync")
     zendesk_metrics.log_aggregate_rates()
 
+# Return a dictionary of params for authentication using OAuth
 def oauth_auth(args):
     if not set(OAUTH_CONFIG_KEYS).issubset(args.config.keys()):
         LOGGER.debug("OAuth authentication unavailable.")
@@ -159,6 +172,7 @@ def oauth_auth(args):
         "oauth_token": args.config['access_token'],
     }
 
+# Return a dictionary of params for authentication using api_token
 def api_token_auth(args):
     if not set(API_TOKEN_CONFIG_KEYS).issubset(args.config.keys()):
         LOGGER.debug("API Token authentication unavailable.")
@@ -171,6 +185,7 @@ def api_token_auth(args):
         "token": args.config['api_token']
     }
 
+# Return a session object to pass to the Zenpy client
 def get_session(config):
     """ Add partner information to requests Session object if specified in the config. """
     if not all(k in config for k in ["marketplace_name",
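A quick illustration of the dependency rule the new comment on validate_dependencies describes: a sub-stream such as ticket_audits can only be extracted when its parent tickets stream is also selected. The sketch below is built around the real SUB_STREAMS map, but the loop body and the second half of the error message are reconstructed for illustration, not copied from the tap:

    SUB_STREAMS = {
        'tickets': ['ticket_audits', 'ticket_metrics', 'ticket_comments']
    }

    def validate_dependencies(selected_stream_ids):
        # Collect an error for every selected sub-stream whose parent is unselected
        errs = []
        msg_tmpl = ("Unable to extract {0} data. "
                    "To receive {0} data, you also need to select {1}.")
        for parent, children in SUB_STREAMS.items():
            for child in children:
                if child in selected_stream_ids and parent not in selected_stream_ids:
                    errs.append(msg_tmpl.format(child, parent))
        if errs:
            raise Exception(" ".join(errs))

    validate_dependencies({'tickets', 'ticket_audits'})  # passes: parent selected
    try:
        validate_dependencies({'ticket_audits'})         # raises: 'tickets' not selected
    except Exception as exc:
        print(exc)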
diff --git a/tap_zendesk/discover.py b/tap_zendesk/discover.py
index 0bac705..c5a80f6 100644
--- a/tap_zendesk/discover.py
+++ b/tap_zendesk/discover.py
@@ -6,6 +6,7 @@
 def get_abs_path(path):
     return os.path.join(os.path.dirname(os.path.realpath(__file__)), path)
 
+# Load and return a dictionary of referenced schemas from 'schemas/shared'
 def load_shared_schema_refs():
     ref_sub_path = 'shared'
     shared_schemas_path = get_abs_path('schemas/' + ref_sub_path)
@@ -20,6 +21,7 @@ def load_shared_schema_refs():
 
     return shared_schema_refs
 
+# Discover schemas, build metadata for all the streams, and return the catalog
 def discover_streams(client):
     streams = []
 
     refs = load_shared_schema_refs()
diff --git a/tap_zendesk/http.py b/tap_zendesk/http.py
index 469e060..96b319c 100644
--- a/tap_zendesk/http.py
+++ b/tap_zendesk/http.py
@@ -7,6 +7,7 @@
 
 LOGGER = singer.get_logger()
 
+# Check whether the error should be retried or not
 def is_fatal(exception):
     status_code = exception.response.status_code
 
@@ -18,6 +19,7 @@ def is_fatal(exception):
 
     return 400 <= status_code < 500
 
+# Call the API and retry on 429 errors
 @backoff.on_exception(backoff.expo,
                       requests.exceptions.HTTPError,
                       max_tries=10,
@@ -27,6 +29,7 @@ def call_api(url, params, headers):
     response.raise_for_status()
     return response
 
+# Retrieve data for a particular stream using cursor-based pagination
 def get_cursor_based(url, access_token, cursor=None, **kwargs):
     headers = {
         'Content-Type': 'application/json',
@@ -50,6 +53,7 @@ def get_cursor_based(url, access_token, cursor=None, **kwargs):
 
     has_more = response_json['meta']['has_more']
 
+    # While has_more is true, fetch the next page of data
     while has_more:
         cursor = response_json['meta']['after_cursor']
         params['page[after]'] = cursor
@@ -60,6 +64,7 @@ def get_cursor_based(url, access_token, cursor=None, **kwargs):
         yield response_json
         has_more = response_json['meta']['has_more']
 
+# Retrieve data for a particular stream using offset-based pagination
 def get_offset_based(url, access_token, **kwargs):
     headers = {
         'Content-Type': 'application/json',
@@ -80,6 +85,7 @@ def get_offset_based(url, access_token, **kwargs):
 
     next_url = response_json.get('next_page')
 
+    # While next_url is present, fetch the next page of data
     while next_url:
         response = call_api(next_url, params=None, headers=headers)
         response_json = response.json()
@@ -87,6 +93,7 @@ def get_offset_based(url, access_token, **kwargs):
         yield response_json
         next_url = response_json.get('next_page')
 
+# Retrieve data from the incremental exports endpoint using cursor-based pagination
 def get_incremental_export(url, access_token, start_time):
     headers = {
         'Content-Type': 'application/json',
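For readers skimming the new pagination comments in http.py: get_cursor_based yields whole pages and keeps requesting while meta.has_more is set, passing meta.after_cursor back as the page[after] param. A stub-driven sketch of that contract follows; the fake pages and the paginate helper are illustrative, not tap code:

    # Fake pages standing in for Zendesk cursor-paginated responses
    FAKE_PAGES = [
        {'users': [{'id': 1}], 'meta': {'has_more': True, 'after_cursor': 'abc'}},
        {'users': [{'id': 2}], 'meta': {'has_more': False, 'after_cursor': None}},
    ]

    def paginate(pages):
        it = iter(pages)
        page = next(it)
        yield page
        while page['meta']['has_more']:
            # A real client would re-request with params={'page[after]': meta['after_cursor']}
            page = next(it)
            yield page

    for page in paginate(FAKE_PAGES):
        for user in page['users']:
            print(user['id'])   # prints 1, then 2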
diff --git a/tap_zendesk/streams.py b/tap_zendesk/streams.py
index c707d51..0d911aa 100644
--- a/tap_zendesk/streams.py
+++ b/tap_zendesk/streams.py
@@ -64,9 +64,11 @@ def __init__(self, client=None, config=None):
         self.client = client
         self.config = config
 
+    # Get the bookmark from the state file
     def get_bookmark(self, state):
         return utils.strptime_with_tz(singer.get_bookmark(state, self.name, self.replication_key))
 
+    # Update the bookmark in the state file
     def update_bookmark(self, state, value):
         current_bookmark = self.get_bookmark(state)
         if value and utils.strptime_with_tz(value) > current_bookmark:
@@ -79,6 +81,7 @@ def load_schema(self):
             schema = json.load(f)
         return self._add_custom_fields(schema)
 
+    # Return custom fields to add into the catalog for the provided stream
     def _add_custom_fields(self, schema): # pylint: disable=no-self-use
         return schema
 
@@ -86,12 +89,15 @@ def load_metadata(self):
         schema = self.load_schema()
         mdata = metadata.new()
 
+        # Write the key properties and replication method to the metadata
         mdata = metadata.write(mdata, (), 'table-key-properties', self.key_properties)
         mdata = metadata.write(mdata, (), 'forced-replication-method', self.replication_method)
 
+        # Write the replication key to the metadata
         if self.replication_key:
             mdata = metadata.write(mdata, (), 'valid-replication-keys', [self.replication_key])
 
+        # Make inclusion automatic for the key properties and replication keys
         for field_name in schema['properties'].keys():
             if field_name in self.key_properties or field_name == self.replication_key:
                 mdata = metadata.write(mdata, ('properties', field_name), 'inclusion', 'automatic')
@@ -152,6 +158,7 @@ class Organizations(Stream):
     endpoint = 'https://{}.zendesk.com/api/v2/organizations'
     item_key = 'organizations'
 
+    # Add custom fields for the organizations stream
     def _add_custom_fields(self, schema):
         endpoint = self.client.organizations.endpoint
         # NB: Zenpy doesn't have a public endpoint for this at time of writing
@@ -180,6 +187,7 @@ class Users(Stream):
     replication_method = "INCREMENTAL"
     replication_key = "updated_at"
 
+    # Add custom fields for the users stream
     def _add_custom_fields(self, schema):
         try:
             field_gen = self.client.user_fields()
@@ -195,15 +203,20 @@ def sync(self, state):
         original_search_window_size = int(self.config.get('search_window_size', DEFAULT_SEARCH_WINDOW_SIZE))
         search_window_size = original_search_window_size
         bookmark = self.get_bookmark(state)
+        # Start 1 second before the last saved bookmark
         start = bookmark - datetime.timedelta(seconds=1)
+        # End at start + search_window_size
         end = start + datetime.timedelta(seconds=search_window_size)
+        # sync_end is 1 minute before the current time
         sync_end = singer.utils.now() - datetime.timedelta(minutes=1)
         parsed_sync_end = singer.strftime(sync_end, "%Y-%m-%dT%H:%M:%SZ")
 
         # ASSUMPTION: updated_at value always comes back in utc
         num_retries = 0
 
+        # Loop from start to sync_end collecting user records
         while start < sync_end:
             parsed_start = singer.strftime(start, "%Y-%m-%dT%H:%M:%SZ")
+            # parsed_end is the minimum of end and sync_end
             parsed_end = min(singer.strftime(end, "%Y-%m-%dT%H:%M:%SZ"), parsed_sync_end)
             LOGGER.info("Querying for users between %s and %s", parsed_start, parsed_end)
             users = self.client.search("", updated_after=parsed_start, updated_before=parsed_end, type="user")
@@ -223,6 +236,7 @@ def sync(self, state):
             # Consume the records to account for dates lower than window start
             users = [user for user in users] # pylint: disable=unnecessary-comprehension
 
+            # Check whether any record is older than the window start
             if not all(parsed_start <= user.updated_at for user in users):
                 # Only retry up to 30 minutes (60 attempts at 30 seconds each)
                 if num_retries < 60:
@@ -245,6 +259,7 @@ def sync(self, state):
             # Assumes that the for loop got everything
             singer.write_state(state)
 
+            #If search_window_size less or equall than half of original window size then make it double.
             if search_window_size <= original_search_window_size // 2:
                 search_window_size = search_window_size * 2
                 LOGGER.info("Successfully requested records. Doubling search window to %s seconds", search_window_size)
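The Users.sync comments above describe a sliding search window: start a second before the bookmark, query [start, start + search_window_size), and resize the window as the sync proceeds. Isolated from the stream code, the resizing policy looks roughly like this; the succeeded flag is a simplification of the tap's actual retry and result-count checks:

    def next_window_size(current, original, succeeded):
        if not succeeded:
            # Shrink the window when a query misbehaves, never below 1 second
            return max(current // 2, 1)
        if current <= original // 2:
            # Grow a previously shrunk window back toward the configured size
            return current * 2
        return current

    print(next_window_size(86400, 86400, succeeded=False))  # 43200
    print(next_window_size(43200, 86400, succeeded=True))   # 86400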
@@ -264,19 +279,24 @@ class Tickets(CursorBasedExportStream):
     buf_time = 60
     def _buffer_record(self, record):
         stream_name = record[0].tap_stream_id
+        # Set last_record_emit to the current time if it is None
         if self.last_record_emit.get(stream_name) is None:
             self.last_record_emit[stream_name] = utils.now()
 
+        # Create the buffer list
         if self.buf.get(stream_name) is None:
             self.buf[stream_name] = []
+        #Append record to buffer 
         self.buf[stream_name].append(record)
 
+        # Return True if the buffer time (60 seconds) has elapsed since the last emit
         if (utils.now() - self.last_record_emit[stream_name]).total_seconds() > self.buf_time:
             self.last_record_emit[stream_name] = utils.now()
             return True
 
         return False
 
+    # Yield records from the buffer list
     def _empty_buffer(self):
         for stream_name, stream_buf in self.buf.items():
             for rec in stream_buf:
@@ -285,6 +305,7 @@
 
     def sync(self, state): #pylint: disable=too-many-statements
         bookmark = self.get_bookmark(state)
+        # Get the records of tickets
         tickets = self.get_objects(bookmark)
 
         audits_stream = TicketAudits(self.client, self.config)
@@ -302,17 +323,20 @@ def emit_sub_stream_metrics(sub_stream):
         if audits_stream.is_selected():
             LOGGER.info("Syncing ticket_audits per ticket...")
 
+        # Loop over the records of the parent stream: tickets
         for ticket in tickets:
             zendesk_metrics.capture('ticket')
             generated_timestamp_dt = datetime.datetime.utcfromtimestamp(ticket.get('generated_timestamp')).replace(tzinfo=pytz.UTC)
             self.update_bookmark(state, utils.strftime(generated_timestamp_dt))
 
             ticket.pop('fields') # NB: Fields is a duplicate of custom_fields, remove before emitting
 
+            # Append the ticket record to the buffer; should_yield is True once the buffer time has elapsed
             should_yield = self._buffer_record((self.stream, ticket))
 
             if audits_stream.is_selected():
                 try:
                     for audit in audits_stream.sync(ticket["id"]):
+                        # Append the audit record to the buffer
                         self._buffer_record(audit)
                 except HTTPError as e:
                     if e.response.status_code == 404:
@@ -323,6 +347,7 @@ def emit_sub_stream_metrics(sub_stream):
             if metrics_stream.is_selected():
                 try:
                     for metric in metrics_stream.sync(ticket["id"]):
+                        # Append the metric record to the buffer
                         self._buffer_record(metric)
                 except HTTPError as e:
                     if e.response.status_code == 404:
@@ -335,6 +360,7 @@ def emit_sub_stream_metrics(sub_stream):
                     # add ticket_id to ticket_comment so the comment can
                     # be linked back to it's corresponding ticket
                     for comment in comments_stream.sync(ticket["id"]):
+                        # Append the comment record to the buffer
                         self._buffer_record(comment)
                 except HTTPError as e:
                     if e.response.status_code == 404:
@@ -343,6 +369,7 @@ def emit_sub_stream_metrics(sub_stream):
                         raise e
 
             if should_yield:
+                # Yield records from the buffer list
                 for rec in self._empty_buffer():
                     yield rec
                 emit_sub_stream_metrics(audits_stream)
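The buffering comments in Tickets._buffer_record describe a time-based flush: records for each stream accumulate in a per-stream list, and the caller is told to flush once buf_time (60 seconds) has passed since the last emit. A self-contained sketch of that pattern, with names and the record shape simplified:

    import time

    BUF_TIME = 60  # seconds; mirrors Tickets.buf_time

    class RecordBuffer:
        def __init__(self):
            self.buf = {}
            self.last_emit = {}

        def add(self, stream_name, record):
            now = time.monotonic()
            self.last_emit.setdefault(stream_name, now)
            self.buf.setdefault(stream_name, []).append(record)
            if now - self.last_emit[stream_name] > BUF_TIME:
                self.last_emit[stream_name] = now
                return True   # caller should drain and emit now
            return False

        def drain(self):
            # Yield everything buffered so far, then reset
            for records in self.buf.values():
                yield from records
            self.buf = {}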
@@ -364,6 +391,7 @@ class TicketAudits(Stream):
     endpoint='https://{}.zendesk.com/api/v2/tickets/{}/audits.json'
     item_key='audits'
 
+    # Get the records of ticket_audits
     def get_objects(self, ticket_id):
         url = self.endpoint.format(self.config['subdomain'], ticket_id)
         pages = http.get_offset_based(url, self.config['access_token'])
@@ -384,6 +412,7 @@ class TicketMetrics(CursorBasedStream):
     endpoint = 'https://{}.zendesk.com/api/v2/tickets/{}/metrics'
     item_key = 'ticket_metric'
 
+    # Get the records of ticket_metrics
     def sync(self, ticket_id):
         # Only 1 ticket metric per ticket
         url = self.endpoint.format(self.config['subdomain'], ticket_id)
@@ -400,6 +429,7 @@ class TicketComments(Stream):
     endpoint = "https://{}.zendesk.com/api/v2/tickets/{}/comments.json"
     item_key='comments'
 
+    # Get the records of ticket_comments
     def get_objects(self, ticket_id):
         url = self.endpoint.format(self.config['subdomain'], ticket_id)
         pages = http.get_offset_based(url, self.config['access_token'])
@@ -425,9 +455,13 @@ def sync(self, state):
         bookmark = self.get_bookmark(state)
         epoch_bookmark = int(bookmark.timestamp())
         params = {'start_time': epoch_bookmark}
+        # Get the records of satisfaction_ratings
         ratings = self.get_objects(params=params)
         for rating in ratings:
             if utils.strptime_with_tz(rating['updated_at']) >= bookmark:
+                # NB: We don't trust that the records come back ordered by
+                #     updated_at (we've observed out-of-order records),
+                #     so we can't save state until we've seen all records
                 self.update_bookmark(state, rating['updated_at'])
                 yield (self.stream, rating)
@@ -442,6 +476,7 @@ class Groups(CursorBasedStream):
 
     def sync(self, state):
         bookmark = self.get_bookmark(state)
+        # Get the records of the groups stream
         groups = self.get_objects()
         for group in groups:
             if utils.strptime_with_tz(group['updated_at']) >= bookmark:
@@ -461,6 +496,7 @@ class Macros(CursorBasedStream):
 
     def sync(self, state):
         bookmark = self.get_bookmark(state)
+        # Get the records of macros
         macros = self.get_objects()
         for macro in macros:
             if utils.strptime_with_tz(macro['updated_at']) >= bookmark:
@@ -478,6 +514,7 @@ class Tags(CursorBasedStream):
     item_key = 'tags'
 
     def sync(self, state): # pylint: disable=unused-argument
+        # Get the records of tags
         tags = self.get_objects()
 
         for tag in tags:
@@ -493,6 +530,7 @@ class TicketFields(CursorBasedStream):
 
     def sync(self, state):
         bookmark = self.get_bookmark(state)
+        # Get the records of ticket_fields
         fields = self.get_objects()
         for field in fields:
             if utils.strptime_with_tz(field['updated_at']) >= bookmark:
@@ -510,6 +548,7 @@ class TicketForms(Stream):
 
     def sync(self, state):
         bookmark = self.get_bookmark(state)
+        # Get the records of ticket_forms
         forms = self.client.ticket_forms()
         for form in forms:
             if utils.strptime_with_tz(form.updated_at) >= bookmark:
@@ -529,6 +568,7 @@ class GroupMemberships(CursorBasedStream):
 
     def sync(self, state):
         bookmark = self.get_bookmark(state)
+        # Get the records of group_memberships
         memberships = self.get_objects()
 
         for membership in memberships:
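The NB comment added to SatisfactionRatings.sync is worth restating: because records can arrive out of order by updated_at, the bookmark must track the maximum value seen, and state can only be written after the full pass. A small sketch of that rule, with illustrative timestamps:

    from datetime import datetime

    def advance_bookmark(records, bookmark):
        # Keep the max updated_at seen; do not persist state mid-stream
        for rec in records:
            ts = datetime.fromisoformat(rec['updated_at'])
            if ts > bookmark:
                bookmark = ts
        return bookmark   # safe to write to state only after all records

    records = [{'updated_at': '2021-10-08T10:00:00'},
               {'updated_at': '2021-10-08T09:00:00'}]   # out of order
    print(advance_bookmark(records, datetime(2021, 1, 1)))  # 2021-10-08 10:00:00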
diff --git a/test/unittests/test_http.py b/test/unittests/test_http.py
index 2328f55..1bdfb29 100644
--- a/test/unittests/test_http.py
+++ b/test/unittests/test_http.py
@@ -38,11 +38,13 @@ def test_get_cursor_based_gets_one_page(self, mock_get):
         responses = [response for response in http.get_cursor_based(url='some_url',
                                                                     access_token='some_token')]
         actual_response = responses[0]
+        #Verify actual response of cursor based gets is equall to SINGLE_RESPONSE
         self.assertDictEqual(SINGLE_RESPONSE, actual_response)
 
         expected_call_count = 1
         actual_call_count = mock_get.call_count
+        # Verify actual_call_count is only 1
         self.assertEqual(expected_call_count, actual_call_count)
 
     @patch('requests.get',
@@ -55,13 +57,16 @@ def test_get_cursor_based_can_paginate(self, mock_get):
                      for response in http.get_cursor_based(url='some_url',
                                                            access_token='some_token')]
 
+        # Verify the response of the 1st call has the expected pagination attribute
         self.assertDictEqual({"key1": "val1", **PAGINATE_RESPONSE}, responses[0])
 
+        #Verifi response of 2nd call has expected SINGLE_RESPONSE
         self.assertDictEqual({"key2": "val2", **SINGLE_RESPONSE}, responses[1])
 
         expected_call_count = 2
         actual_call_count = mock_get.call_count
+        # Verify the actual call count of the API is 2
         self.assertEqual(expected_call_count, actual_call_count)
 
 
@@ -85,4 +90,5 @@ def test_get_cursor_based_handles_429(self, mock_get):
 
         expected_call_count = 3
         actual_call_count = mock_get.call_count
+        # Verify get_cursor_based retries 429 errors and actual_call_count is as expected
         self.assertEqual(expected_call_count, actual_call_count)
diff --git a/test/unittests/test_init.py b/test/unittests/test_init.py
index 28b6740..34fca5e 100644
--- a/test/unittests/test_init.py
+++ b/test/unittests/test_init.py
@@ -8,16 +8,21 @@ class TestGetSession(unittest.TestCase):
     """
     def test_no_partner_info_returns_none(self):
         test_session = get_session({})
+        # Verify test_session is None when no partner info is passed
         self.assertEqual(test_session, None)
 
     def test_incomplete_partner_info_returns_none(self):
         test_session = get_session({"marketplace_name": "Hithere"})
+        # Verify test_session is None when incomplete partner info is passed
         self.assertEqual(test_session, None)
 
     def test_adds_headers_when_all_present_in_config(self):
         test_session = get_session({"marketplace_name": "Hithere",
                                     "marketplace_organization_id": 1234,
                                     "marketplace_app_id": 12345})
+        # Verify the marketplace_name header returned from get_session is as expected
         self.assertEqual("Hithere", test_session.headers.get("X-Zendesk-Marketplace-Name"))
+        # Verify the marketplace_organization_id header returned from get_session is as expected
         self.assertEqual("1234", test_session.headers.get("X-Zendesk-Marketplace-Organization-Id"))
+        # Verify the marketplace_app_id header returned from get_session is as expected
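The unit tests above rely on patching requests.get with a side_effect list, so each successive call returns the next canned response. A stripped-down sketch of that mocking pattern follows; the fake helper and payload are illustrative, not taken from the test module:

    import requests
    from unittest.mock import MagicMock, patch

    def fake(json_body, status=200):
        # Build a canned response object with the attributes the code under test reads
        mock_resp = MagicMock(status_code=status)
        mock_resp.json.return_value = json_body
        return mock_resp

    SINGLE = {'meta': {'has_more': False}}

    @patch('requests.get', side_effect=[fake(SINGLE)])
    def run_check(mock_get):
        resp = requests.get('https://example.com')   # returns the canned response
        assert resp.json() == SINGLE
        assert mock_get.call_count == 1

    run_check()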
self.assertEqual("12345", test_session.headers.get("X-Zendesk-Marketplace-App-Id")) From c1a3606e35be633c917121e7718f3c812793a1ec Mon Sep 17 00:00:00 2001 From: prijendev Date: Fri, 8 Oct 2021 14:37:31 +0530 Subject: [PATCH 2/5] Resolved pylint error --- tap_zendesk/streams.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tap_zendesk/streams.py b/tap_zendesk/streams.py index 0d911aa..9b3bafd 100644 --- a/tap_zendesk/streams.py +++ b/tap_zendesk/streams.py @@ -286,7 +286,7 @@ def _buffer_record(self, record): #Create buffer list if self.buf.get(stream_name) is None: self.buf[stream_name] = [] - #Append record to buffer + #Append record to buffer self.buf[stream_name].append(record) #If buffer time elapsed by 60 seconds then return true From ad8b08f7e9b899e86212f260606442ceae87eb73 Mon Sep 17 00:00:00 2001 From: prijendev Date: Mon, 18 Oct 2021 16:33:21 +0530 Subject: [PATCH 3/5] Updated spell --- test/unittests/test_http.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unittests/test_http.py b/test/unittests/test_http.py index 1bdfb29..905fb22 100644 --- a/test/unittests/test_http.py +++ b/test/unittests/test_http.py @@ -60,7 +60,7 @@ def test_get_cursor_based_can_paginate(self, mock_get): #Verify response of 1st call have expected pagination attribute self.assertDictEqual({"key1": "val1", **PAGINATE_RESPONSE}, responses[0]) - #Verifi response of 2nd call has expected SINGLE_RESPONSE + #Verify response of 2nd call has expected SINGLE_RESPONSE self.assertDictEqual({"key2": "val2", **SINGLE_RESPONSE}, responses[1]) From fc8524061b850b839109c4c8200cbf92ded1f1d8 Mon Sep 17 00:00:00 2001 From: prijendev Date: Thu, 21 Oct 2021 15:19:57 +0530 Subject: [PATCH 4/5] added coverage report --- .circleci/config.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0cf1a65..7430186 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -14,12 +14,19 @@ jobs: virtualenv -p python3 /usr/local/share/virtualenvs/tap-zendesk source /usr/local/share/virtualenvs/tap-zendesk/bin/activate pip install .[test] + pip install coverage - run: name: 'pylint' command: | source /usr/local/share/virtualenvs/tap-zendesk/bin/activate - make test + pylint tap_zendesk -d missing-docstring,invalid-name,line-too-long,too-many-locals,too-few-public-methods,fixme,stop-iteration-return,too-many-branches,useless-import-alias,no-else-return,logging-not-lazy + nosetests --with-coverage --cover-erase --cover-package=tap_zendesk --cover-html-dir=htmlcov test/unittests + coverage html - add_ssh_keys + - store_test_results: + path: test_output/report.xml + - store_artifacts: + path: htmlcov - run: name: 'Integration Tests' command: | From 4a65528d4031f5bbf790d40e3af2cb3f05efe9ed Mon Sep 17 00:00:00 2001 From: prijendev Date: Thu, 28 Oct 2021 14:07:36 +0530 Subject: [PATCH 5/5] Updated spell mistake --- tap_zendesk/__init__.py | 2 +- tap_zendesk/streams.py | 2 +- test/unittests/test_http.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tap_zendesk/__init__.py b/tap_zendesk/__init__.py index d8102c2..fffd8fe 100755 --- a/tap_zendesk/__init__.py +++ b/tap_zendesk/__init__.py @@ -113,7 +113,7 @@ def do_sync(client, catalog, state, config): for stream in catalog.streams: stream_name = stream.tap_stream_id mdata = metadata.to_map(stream.metadata) - #If stream does not selected then skip it. + # If the stream is not selected then skip it. 
From 4a65528d4031f5bbf790d40e3af2cb3f05efe9ed Mon Sep 17 00:00:00 2001
From: prijendev
Date: Thu, 28 Oct 2021 14:07:36 +0530
Subject: [PATCH 5/5] Fixed spelling mistakes

---
 tap_zendesk/__init__.py     | 2 +-
 tap_zendesk/streams.py      | 2 +-
 test/unittests/test_http.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/tap_zendesk/__init__.py b/tap_zendesk/__init__.py
index d8102c2..fffd8fe 100755
--- a/tap_zendesk/__init__.py
+++ b/tap_zendesk/__init__.py
@@ -113,7 +113,7 @@ def do_sync(client, catalog, state, config):
     for stream in catalog.streams:
         stream_name = stream.tap_stream_id
         mdata = metadata.to_map(stream.metadata)
-        #If stream does not selected then skip it.
+        # If the stream is not selected then skip it.
         if stream_name not in selected_stream_names:
             LOGGER.info("%s: Skipping - not selected", stream_name)
             continue
diff --git a/tap_zendesk/streams.py b/tap_zendesk/streams.py
index 9b3bafd..eb2e3ed 100644
--- a/tap_zendesk/streams.py
+++ b/tap_zendesk/streams.py
@@ -259,7 +259,7 @@ def sync(self, state):
             # Assumes that the for loop got everything
             singer.write_state(state)
 
-            #If search_window_size less or equall than half of original window size then make it double.
+            # If search_window_size is less or equal to half of the original window size, then make it double.
             if search_window_size <= original_search_window_size // 2:
                 search_window_size = search_window_size * 2
                 LOGGER.info("Successfully requested records. Doubling search window to %s seconds", search_window_size)
diff --git a/test/unittests/test_http.py b/test/unittests/test_http.py
index 905fb22..1dbc6be 100644
--- a/test/unittests/test_http.py
+++ b/test/unittests/test_http.py
@@ -38,7 +38,7 @@ def test_get_cursor_based_gets_one_page(self, mock_get):
         responses = [response for response in http.get_cursor_based(url='some_url',
                                                                     access_token='some_token')]
         actual_response = responses[0]
-        #Verify actual response of cursor based gets is equall to SINGLE_RESPONSE
+        # Verify actual response of cursor-based gets is equal to SINGLE_RESPONSE
         self.assertDictEqual(SINGLE_RESPONSE, actual_response)