Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updates to ActionKit, Braintree, and S3 connectors #754

Merged
merged 23 commits into from
Oct 14, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
c75d3ef
Create Google Admin connector
crayolakat Mar 22, 2022
0fa5763
Merge branch 'master' into moveon
crayolakat Mar 22, 2022
825f489
Update setup_google_application_credentials to support dict parameter
technicalex Apr 1, 2022
8584408
Merge pull request #9 from MoveOnOrg/alex-support-dict-param
technicalex Apr 5, 2022
2e2047d
Update authorization library for Google Sheets to support account del…
sjwmoveon Apr 8, 2022
ad77645
Typo fix
sjwmoveon Apr 8, 2022
5633286
Merge pull request #10 from MoveOnOrg/gsheets
sjwmoveon Apr 8, 2022
29cf819
Merge pull request #11 from MoveOnOrg/gsheets
sjwmoveon Apr 11, 2022
1d292b9
Don't auto-grab session token if it's not passed in
sjwmoveon Aug 5, 2022
9e493c3
Merge pull request #17 from MoveOnOrg/sjwmoveon-patch-1
sjwmoveon Aug 5, 2022
7e3f053
Add new general object search methods to ActionKit
sjwmoveon Aug 12, 2022
04bd69d
Remove unused import
sjwmoveon Aug 16, 2022
99cb2f6
Add subscription search to Braintree
sjwmoveon Aug 24, 2022
8a6ca72
Merge branch 'moveon' of https://github.com/MoveOnOrg/parsons into mo…
sjwmoveon Aug 24, 2022
b4a8525
Merge branch 'moveon' into braintree_subs
sjwmoveon Aug 24, 2022
24ea3cb
Merge pull request #19 from MoveOnOrg/braintree_subs
sjwmoveon Aug 26, 2022
5d4e07c
Merge pull request #20 from MoveOnOrg/ak_search
sjwmoveon Sep 6, 2022
6e108a0
Merge branch 'main' into moveon_update
crayolakat Sep 16, 2022
698697c
Merge branch 'main' into moveon_update
crayolakat Sep 27, 2022
c705352
Merge pull request #21 from MoveOnOrg/moveon_update
crayolakat Sep 27, 2022
ca35938
Add os import
crayolakat Sep 27, 2022
63fe054
Adhere to flake8 linting rules
crayolakat Sep 27, 2022
4b9de69
Fix typo
crayolakat Sep 27, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
134 changes: 112 additions & 22 deletions parsons/action_kit/action_kit.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@ def _base_get(self, endpoint, entity_id=None, exception_message=None, params=Non
resp = self.conn.get(self._base_endpoint(endpoint, entity_id), params=params)
if exception_message and resp.status_code == 404:
raise Exception(self.parse_error(resp, exception_message))

return resp.json()

def _base_post(self, endpoint, exception_message, return_full_json=False, **kwargs):
Expand Down Expand Up @@ -186,28 +185,12 @@ def get_events(self, limit=None, **kwargs):

.. code-block:: python

ak.get_events(fields__name__contains="FirstName")
ak.get_events(name__contains="FirstName")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this backwards-incompatible or was the doc just incorrect?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is fixing the documentation but not sure. Tagging @sjwmoveon since she's the author of this change.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, the documentation was incorrect.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

okay, great. I'm going to go ahead and approve + merge this. thanks for the contribution @sjwmoveon and @crayolakat and thanks for the review @schuyler1d

`Returns:`
Parsons.Table
The events data.
"""
# "The maximum number of objects returned per request is 100. Use paging
# to get more objects."
# (https://roboticdogs.actionkit.com/docs//manual/api/rest/overview.html#ordering)
# get `limit` events if it's provided, otherwise get 100
kwargs["_limit"] = min(100, limit or 1_000_000_000)
json_data = self._base_get("event", params=kwargs)
data = json_data["objects"]

next_url = json_data.get("meta", {}).get("next")
while next_url:
resp = self.conn.get(f'https://{self.domain}{next_url}')
data += resp.json().get("objects", [])
next_url = resp.json().get("meta", {}).get("next")
if limit and len(data) >= limit:
break

return Table(data[:limit])
return self.paginated_get('event', limit=limit, **kwargs)

def update_event(self, event_id, **kwargs):
"""
Expand Down Expand Up @@ -591,6 +574,113 @@ def queue_mailer(self, mailing_id):
return self._base_post(endpoint='mailer/' + str(mailing_id) + '/queue',
exception_message='Could not queue mailer')

def paginated_get(self, object_type, limit=None, **kwargs):
    """Get multiple objects of a given type, following API pagination.

    `Args:`
        object_type: string
            The type of object to search for (an ActionKit REST endpoint
            name, e.g. ``"event"`` or ``"user"``).
        limit: int
            The maximum number of objects to return. If omitted, all objects
            are returned.
        **kwargs:
            Optional arguments to pass to the client. A full list can be found
            in the `ActionKit API Documentation <https://roboticdogs.actionkit.com/docs/\
            manual/api/rest/actionprocessing.html>`_.

            Additionally, expressions to filter the data can also be provided. For addition
            info, visit `Django's docs on field lookups <https://docs.djangoproject.com/\
            en/3.1/topics/db/queries/#field-lookups>`_.

            .. code-block:: python

                ak.paginated_get("user", name__contains="FirstName")
    `Returns:`
        Parsons.Table
            The objects data.
    """
    # "The maximum number of objects returned per request is 100. Use paging
    # to get more objects."
    # (https://roboticdogs.actionkit.com/docs//manual/api/rest/overview.html#ordering)
    # Request `limit` objects per page if it's below 100, otherwise 100 at a time.
    kwargs["_limit"] = min(100, limit or 1_000_000_000)
    json_data = self._base_get(object_type, params=kwargs)
    data = json_data["objects"]

    # Follow the API's relative "next" links until exhausted or `limit` is hit.
    next_url = json_data.get("meta", {}).get("next")
    while next_url:
        # Parse the response body once per page (the original parsed it twice).
        page = self.conn.get(f'https://{self.domain}{next_url}').json()
        data.extend(page.get("objects", []))
        next_url = page.get("meta", {}).get("next")
        if limit and len(data) >= limit:
            break

    # data[:None] is the whole list, so an omitted limit returns everything.
    return Table(data[:limit])

def paginated_get_custom_limit(self, object_type, limit=None,
                               threshold_field=None, threshold_value=None,
                               ascdesc='asc', **kwargs):
    """Get multiple objects of a given type, stopping based on the value of a field.

    `Args:`
        object_type: string
            The type of object to search for.
        limit: int
            The maximum number of objects to return. Even if the threshold
            value is not reached, if the limit is set, then at most this many
            objects will be returned.
        threshold_field: string
            The field used to determine when to stop.
            Must be one of the options for ordering by.
        threshold_value: string
            The value of the field to stop at (inclusive: objects whose field
            equals the threshold are returned).
        ascdesc: string
            If "asc" (the default), return all objects up to and including the
            threshold value. If "desc", return all objects down to and
            including the threshold value.
        **kwargs:
            You can also add expressions to filter the data beyond the limit/threshold values
            above. For additional info, visit `Django's docs on field lookups
            <https://docs.djangoproject.com/en/3.1/topics/db/queries/#field-lookups>`_.

            .. code-block:: python

                ak.paginated_get_custom_limit(
                    "order",
                    threshold_field="created_at",
                    threshold_value="2022-01-01",
                )
    `Returns:`
        Parsons.Table
            The objects data.
    """
    # "The maximum number of objects returned per request is 100. Use paging
    # to get more objects."
    # (https://roboticdogs.actionkit.com/docs//manual/api/rest/overview.html#ordering)
    kwargs["_limit"] = min(100, limit or 1_000_000_000)
    # Order by the threshold field so checking the last element of each page
    # is a valid stopping test.
    if ascdesc == "asc":
        kwargs["order_by"] = threshold_field
    else:
        kwargs["order_by"] = "-" + threshold_field
    json_data = self._base_get(object_type, params=kwargs)
    data = json_data["objects"]
    next_url = json_data.get("meta", {}).get("next")
    # Guarding on `data` avoids an IndexError on data[-1] when a page
    # (or the whole result set) is empty.
    while next_url and data:
        # Once the extreme value fetched so far crosses the threshold, every
        # later page would too (results are ordered), so stop fetching.
        last = data[-1].get(threshold_field)
        if ascdesc == "asc" and last > threshold_value:
            break
        if ascdesc == "desc" and last < threshold_value:
            break
        page = self.conn.get(f'https://{self.domain}{next_url}').json()
        data.extend(page.get("objects", []))
        next_url = page.get("meta", {}).get("next")
        if limit and len(data) >= limit:
            break
    # Trim trailing objects that crossed the threshold: walk back from the
    # end to the last index that still satisfies the (inclusive) threshold.
    # This could be more efficient but it's still O(n) so no big deal.
    i = len(data) - 1  # start at the end; 0-indexed means the end is length - 1
    if ascdesc == "asc":
        # `i >= 0` prevents negative-index wraparound when every element
        # exceeds the threshold.
        while i >= 0 and data[i].get(threshold_field) > threshold_value:
            i -= 1
    else:
        while i >= 0 and data[i].get(threshold_field) < threshold_value:
            i -= 1
    # Keep indices 0..i inclusive — index i satisfies the threshold, so the
    # original `data[:i]` dropped one valid object (off-by-one).
    data = data[:i + 1]
    return Table(data[:limit])

def update_order(self, order_id, **kwargs):
"""
Update an order.
Expand Down Expand Up @@ -731,7 +821,7 @@ def create_generic_action(self, page, email=None, ak_id=None, **kwargs):
`Returns`:
dict
The response json
""" # noqa: E501,E261
""" # noqa: E501,E261

if not email or ak_id:
raise ValueError('One of email or ak_id is required.')
Expand Down Expand Up @@ -774,7 +864,7 @@ def bulk_upload_csv(self, csv_file, import_page,
success: whether upload was successful
progress_url: an API URL to get progress on upload processing
res: requests http response object
""" # noqa: E501,E261
""" # noqa: E501,E261

# self.conn defaults to JSON, but this has to be form/multi-part....
upload_client = self._conn({'accepts': 'application/json'})
Expand Down Expand Up @@ -837,7 +927,7 @@ def bulk_upload_table(self, table, import_page, autocreate_user_fields=0,
success: bool -- whether upload was successful (individual rows may not have been)
results: [dict] -- This is a list of the full results.
progress_url and res for any results
""" # noqa: E501,E261
""" # noqa: E501,E261

import_page = check_env.check('ACTION_KIT_IMPORTPAGE', import_page)
upload_tables = self._split_tables_no_empties(
Expand Down
Loading